// Verifies that hashing the input in fixed-size steps produces the same digest
// as the one-shot reference algorithm, for two different step sizes, and that
// GetHashAndReset actually resets the incremental state between the two passes.
private static void VerifyIncrementalResult(HashAlgorithm referenceAlgorithm, IncrementalHash incrementalHash)
{
    byte[] referenceHash = referenceAlgorithm.ComputeHash(s_inputBytes);

    const int StepA = 13;
    const int StepB = 7;
    int position = 0;

    while (position < s_inputBytes.Length - StepA)
    {
        incrementalHash.AppendData(s_inputBytes, position, StepA);
        position += StepA;
    }

    // Trailing partial chunk.
    incrementalHash.AppendData(s_inputBytes, position, s_inputBytes.Length - position);
    byte[] incrementalA = incrementalHash.GetHashAndReset();
    Assert.Equal(referenceHash, incrementalA);

    // Now try again, verifying both immune to step size behaviors, and that GetHashAndReset resets.
    // BUG FIX: the second pass previously appended StepA-sized chunks against the
    // StepB loop bound, which never exercised a different step size and could
    // request bytes past the end of the buffer (position + 13 > Length while
    // position < Length - 7), throwing instead of testing.
    position = 0;
    while (position < s_inputBytes.Length - StepB)
    {
        incrementalHash.AppendData(s_inputBytes, position, StepB);
        position += StepB;
    }

    incrementalHash.AppendData(s_inputBytes, position, s_inputBytes.Length - position);
    byte[] incrementalB = incrementalHash.GetHashAndReset();
    Assert.Equal(referenceHash, incrementalB);
}
// A disposed IncrementalHash must reject every append and finalize entry point.
public static void Dispose_HMAC_ThrowsException(HMAC referenceAlgorithm, HashAlgorithmName hashAlgorithm)
{
    referenceAlgorithm.Dispose();

    IncrementalHash hmac = IncrementalHash.CreateHMAC(hashAlgorithm, s_hmacKey);
    hmac.Dispose();

    byte[] oneByte = new byte[1];
    Assert.Throws<ObjectDisposedException>(() => hmac.AppendData(oneByte));
    Assert.Throws<ObjectDisposedException>(() => hmac.AppendData(new ReadOnlySpan<byte>(oneByte)));
    Assert.Throws<ObjectDisposedException>(() => hmac.GetHashAndReset());
    Assert.Throws<ObjectDisposedException>(() => hmac.TryGetHashAndReset(oneByte, out int _));
}
// Sets up AES-CBC encryption plus an HMAC-SHA256 that authenticates the
// ciphertext stream. The IV is both MAC'd and written in the clear as the
// stream prefix. (Aes, Encryptor, Cipher, Mac, TmpStream are members
// declared elsewhere in this class; note the Aes property shadows the Aes type.)
internal AttachmentCipherOutputStream(byte[] combinedKeyMaterial, Stream outputStream) : base(outputStream)
{
    // combinedKeyMaterial = 32-byte cipher key || 32-byte MAC key.
    byte[][] keyParts = Util.Split(combinedKeyMaterial, 32, 32);

    Aes = Aes.Create();
    Aes.Key = keyParts[0];
    Aes.Mode = CipherMode.CBC;
    Aes.Padding = PaddingMode.PKCS7;
    Encryptor = Aes.CreateEncryptor();

    // Ciphertext is produced into TmpStream, not directly into the base stream.
    Cipher = new CryptoStream(TmpStream, Encryptor, CryptoStreamMode.Write);
    Mac = IncrementalHash.CreateHMAC(HashAlgorithmName.SHA256, keyParts[1]);

    // The (random, Aes.Create-generated) IV is included in the MAC and then
    // emitted unencrypted so the receiver can initialize decryption.
    Mac.AppendData(Aes.IV);
    base.Write(Aes.IV, 0, Aes.IV.Length);
}
/// <summary>
/// Retrieve hash of file contents, using the specified provider
/// </summary>
public async Task<object> GetFileHash(IncrementalHash hashprovider)
{
    // Compute lazily, but only while the raw filestream is ours to rewind;
    // once a decryption stream wraps it, we must not disturb its position.
    bool safeToHash = fileHash == null && decryptionstream == null;
    if (safeToHash)
    {
        fileHash = await TaskUtilities.Streams.GetStreamHash(filestream, hashprovider);
        filestream.Position = 0;
    }

    return fileHash;
}
// Writes the certificate thumbprint (hash of the raw DER bytes under the
// requested algorithm) into destination; returns false if it does not fit.
public virtual bool TryGetCertHash(
    HashAlgorithmName hashAlgorithm,
    Span<byte> destination,
    out int bytesWritten)
{
    ThrowIfInvalid();

    using IncrementalHash hasher = IncrementalHash.CreateHash(hashAlgorithm);
    hasher.AppendData(Pal!.RawData);
    return hasher.TryGetHashAndReset(destination, out bytesWritten);
}
// Wraps a readable stream so its contents can be hashed as they are consumed.
public HasherStream(Stream inner, HashAlgorithmName hashAlgorithmName)
{
    Guard.NotNull(inner, nameof(inner));

    // Only readable streams make sense here; reject anything else up front.
    this.inner = inner.CanRead
        ? inner
        : throw new ArgumentException("Inner stream must be readable.");

    hasher = IncrementalHash.CreateHash(hashAlgorithmName);
}
// An IncrementalHash HMAC finalized with zero appended bytes must match the
// reference HMAC of an empty buffer.
public static void VerifyTrivialHMAC(HMAC referenceAlgorithm, HashAlgorithmName hashAlgorithm)
{
    using (referenceAlgorithm)
    {
        referenceAlgorithm.Key = s_hmacKey;
        byte[] expected = referenceAlgorithm.ComputeHash(Array.Empty<byte>());

        using (IncrementalHash incremental = IncrementalHash.CreateHMAC(hashAlgorithm, s_hmacKey))
        {
            Assert.Equal(expected, incremental.GetHashAndReset());
        }
    }
}
/// <summary>
/// Creates a name-based UUID using the algorithm from RFC 4122 §4.3.
/// </summary>
/// <param name="namespaceId">The ID of the namespace.</param>
/// <param name="name">The name (within that namespace).</param>
/// <param name="version">The version number of the UUID to create; this value must be either
/// 3 (for MD5 hashing) or 5 (for SHA-1 hashing).</param>
/// <returns>A UUID derived from the namespace and name.</returns>
/// <exception cref="ArgumentNullException"><paramref name="name"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="version"/> is not 3 or 5.</exception>
/// <remarks>See <a href="http://code.logos.com/blog/2011/04/generating_a_deterministic_guid.html">Generating a deterministic GUID</a>.</remarks>
public static Guid Create(Guid namespaceId, string name, int version)
{
    if (name == null)
    {
        throw new ArgumentNullException("name");
    }
    if (version != 3 && version != 5)
    {
        throw new ArgumentOutOfRangeException("version", "version must be either 3 or 5.");
    }

    // convert the name to a sequence of octets (as defined by the standard or conventions of its namespace) (step 3)
    // ASSUME: UTF-8 encoding is always appropriate
    byte[] nameBytes = Encoding.UTF8.GetBytes(name);

    // convert the namespace UUID to network order (step 3)
    byte[] namespaceBytes = namespaceId.ToByteArray();
    GuidCreator.SwapByteOrder(namespaceBytes);

    // compute the hash of the namespace ID concatenated with the name (step 4)
    // (the original also allocated an unused HashAlgorithm instance and carried
    // commented-out TransformBlock code; both removed)
    byte[] hash;
    using (var incrementalHash = version == 3
        ? IncrementalHash.CreateHash(HashAlgorithmName.MD5)
        : IncrementalHash.CreateHash(HashAlgorithmName.SHA1))
    {
        incrementalHash.AppendData(namespaceBytes);
        incrementalHash.AppendData(nameBytes);
        hash = incrementalHash.GetHashAndReset();
    }

    // most bytes from the hash are copied straight to the bytes of the new GUID (steps 5-7, 9, 11-12)
    byte[] newGuid = new byte[16];
    Array.Copy(hash, 0, newGuid, 0, 16);

    // set the four most significant bits (bits 12 through 15) of the time_hi_and_version field to the appropriate 4-bit version number from Section 4.1.3 (step 8)
    newGuid[6] = (byte)((newGuid[6] & 0x0F) | (version << 4));

    // set the two most significant bits (bits 6 and 7) of the clock_seq_hi_and_reserved to zero and one, respectively (step 10)
    newGuid[8] = (byte)((newGuid[8] & 0x3F) | 0x80);

    // convert the resulting UUID to local byte order (step 13)
    GuidCreator.SwapByteOrder(newGuid);
    return new Guid(newGuid);
}
// Validates the emit options, adding one diagnostic per invalid setting;
// never throws to the caller.
internal void ValidateOptions(DiagnosticBag diagnostics, CommonMessageProvider messageProvider, bool isDeterministic)
{
    if (!DebugInformationFormat.IsValid())
    {
        diagnostics.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_InvalidDebugInformationFormat, Location.None, (int)DebugInformationFormat));
    }

    foreach (var instrumentationKind in InstrumentationKinds)
    {
        if (!instrumentationKind.IsValid())
        {
            diagnostics.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_InvalidInstrumentationKind, Location.None, (int)instrumentationKind));
        }
    }

    if (OutputNameOverride != null)
    {
        // CheckAssemblyOrModuleName reports into `diagnostics` itself.
        MetadataHelpers.CheckAssemblyOrModuleName(OutputNameOverride, messageProvider, messageProvider.ERR_InvalidOutputName, diagnostics);
    }

    if (FileAlignment != 0 && !IsValidFileAlignment(FileAlignment))
    {
        diagnostics.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_InvalidFileAlignment, Location.None, FileAlignment));
    }

    if (!SubsystemVersion.Equals(SubsystemVersion.None) && !SubsystemVersion.IsValid)
    {
        diagnostics.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_InvalidSubsystemVersion, Location.None, SubsystemVersion.ToString()));
    }

    if (PdbChecksumAlgorithm.Name != null)
    {
        try
        {
            // Probe the algorithm name by constructing (and immediately
            // disposing) a hasher; failure means the name is unsupported.
            IncrementalHash.CreateHash(PdbChecksumAlgorithm).Dispose();
        }
        catch
        {
            // Deliberate catch-all: any construction failure maps to the
            // same "invalid hash algorithm name" diagnostic.
            diagnostics.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_InvalidHashAlgorithmName, Location.None, PdbChecksumAlgorithm.ToString()));
        }
    }
    else if (isDeterministic)
    {
        // Deterministic builds require a PDB checksum algorithm to be set.
        diagnostics.Add(messageProvider.CreateDiagnostic(messageProvider.ERR_InvalidHashAlgorithmName, Location.None, ""));
    }

    if (PdbFilePath != null && !PathUtilities.IsValidFilePath(PdbFilePath))
    {
        diagnostics.Add(messageProvider.CreateDiagnostic(messageProvider.FTL_InvalidInputFileName, Location.None, PdbFilePath));
    }
}
// Prints the SHA-1-based digest of a few sample names, reusing one hasher.
static void Main(string[] args)
{
    using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA1);

    foreach (var name in new[] { "Notch", "jeb_", "simon" })
    {
        Console.WriteLine(name + ": " + Digest(hasher, name));
    }
}
// HMACs the input with (key + salt) and returns the lowercase-hex digest with
// the salt applied via _AddSalt. (Despite the name, this is a keyed hash,
// not reversible encryption.)
public string Encrypt(string input, string key, TokenSalt salt)
{
    byte[] hmacKey = Encoding.UTF8.GetBytes(key + salt.Salt);

    using (var hmac = IncrementalHash.CreateHMAC(_settings.HashAlgorithm, hmacKey))
    {
        hmac.AppendData(Encoding.UTF8.GetBytes(input));
        byte[] digest = hmac.GetHashAndReset();

        // Lowercase hex, no separators (equivalent to
        // BitConverter.ToString(..).Replace("-", "").ToLower()).
        var hex = new StringBuilder(digest.Length * 2);
        foreach (byte b in digest)
        {
            hex.Append(b.ToString("x2"));
        }

        return _AddSalt(hex.ToString(), salt);
    }
}
// Selects the MD5 implementation for the current target platform.
internal MD5Wrapper()
{
#if WINDOWS_RT
    // WinRT crypto provider.
    this.hash = HashAlgorithmProvider.OpenAlgorithm("MD5").CreateHash();
#elif WINDOWS_PHONE
    // MD5 is unavailable on this platform; fail fast.
    throw new NotSupportedException(SR.WindowsPhoneDoesNotSupportMD5);
#elif NETCORE
    // .NET Core: incremental hashing API.
    this.hash = IncrementalHash.CreateHash(HashAlgorithmName.MD5);
#else
    // Sandboxable: No native methods
    //this.hash = this.version1MD5 ? MD5.Create() : new NativeMD5();
    this.hash = MD5.Create();
#endif
}
// The span-based TryGetHashAndReset on an empty HMAC must succeed, report the
// full digest length, and match the reference HMAC of an empty buffer.
public static void VerifyTrivialHMAC_Span(HMAC referenceAlgorithm, HashAlgorithmName hashAlgorithm)
{
    using (referenceAlgorithm)
    using (IncrementalHash incremental = IncrementalHash.CreateHMAC(hashAlgorithm, s_hmacKey))
    {
        referenceAlgorithm.Key = s_hmacKey;
        byte[] expected = referenceAlgorithm.ComputeHash(Array.Empty<byte>());

        var actual = new byte[expected.Length];
        bool success = incremental.TryGetHashAndReset(actual, out int written);

        Assert.True(success);
        Assert.Equal(expected.Length, written);
        Assert.Equal(expected, actual);
    }
}
// Used in cases where we can bail out of getting a normalized hash early.
// The position of the filestream must match the state of the incremental hash:
// this lets us stop normalizing part way through the file and continue hashing
// the remaining raw bytes here.
private byte[] GetRawChecksum(FileStream fs, IncrementalHash hash)
{
    // Append the rest of the stream to the hash in 4 KiB chunks.
    var chunk = new byte[4096];
    for (int read = fs.Read(chunk, 0, chunk.Length);
         read != 0;
         read = fs.Read(chunk, 0, chunk.Length))
    {
        hash.AppendData(chunk, 0, read);
    }

    return hash.GetHashAndReset();
}
/// <summary>
/// Polyfill for <c>IncrementalHash.TryGetHashAndReset</c>: writes the current
/// digest into <paramref name="dest"/> and resets the hash state.
/// </summary>
/// <returns><c>true</c> if the digest fit in <paramref name="dest"/>; otherwise <c>false</c>.</returns>
public static bool TryGetHashAndReset(this IncrementalHash incrementalHash, Span<byte> dest, out int bytesWritten)
{
    // BUG FIX: the original called GetHashAndReset before checking the
    // destination size, so a too-small destination still reset the hash and
    // silently discarded all appended data. When the digest size is known from
    // the algorithm name, reject an undersized destination up front without
    // touching the accumulated state (matching the real API's contract).
    int digestLength = GetKnownDigestLength(incrementalHash.AlgorithmName.Name);
    if (digestLength > 0 && dest.Length < digestLength)
    {
        bytesWritten = 0;
        return false;
    }

    byte[] hash = incrementalHash.GetHashAndReset();
    if (dest.Length < hash.Length)
    {
        // Unknown-algorithm fallback: preserves the original behavior
        // (state has already been reset at this point).
        bytesWritten = 0;
        return false;
    }

    hash.CopyTo(dest);
    bytesWritten = hash.Length;
    return true;
}

// Digest sizes in bytes for the standard HashAlgorithmName values; 0 if unknown.
private static int GetKnownDigestLength(string algorithmName)
{
    switch (algorithmName)
    {
        case "MD5": return 16;
        case "SHA1": return 20;
        case "SHA256": return 32;
        case "SHA384": return 48;
        case "SHA512": return 64;
        default: return 0;
    }
}
// Selects the MD5 implementation for the current target framework.
internal MD5Wrapper()
{
#if DOTNET5_4
    // Modern target: incremental hashing API.
    this.hash = IncrementalHash.CreateHash(HashAlgorithmName.MD5);
#else
    // UseV1MD5 selects the managed MD5CryptoServiceProvider; otherwise the
    // project's NativeMD5 wrapper is used.
    if (CloudStorageAccount.UseV1MD5)
    {
        this.hash = new MD5CryptoServiceProvider();
    }
    else
    {
        this.hash = new NativeMD5();
    }
#endif
}
// Feeds the UTF-16 input into the hasher by transcoding it to UTF-8 in
// stack-allocated 1 KiB chunks.
// NOTE(review): this block is truncated in the visible chunk — the loop tail,
// digest finalization, and closing braces continue beyond it.
public static string Digest(IncrementalHash hasher, string input)
{
    Span<byte> buffer = stackalloc byte[1024];
    var src = input.AsSpan();
    while (src.Length > 0)
    {
        // NOTE(review): anything other than Done throws — including
        // DestinationTooSmall, which Utf8.FromUtf16 returns when the UTF-8
        // output exceeds the 1 KiB buffer. Confirm inputs are always small
        // enough that partial transcoding never occurs.
        if (Utf8.FromUtf16(src, buffer, out int read, out int written) != OperationStatus.Done)
        {
            throw new Exception();
        }
        // Append only the bytes actually produced, then advance past the
        // chars actually consumed.
        hasher.AppendData(buffer.Slice(0, written));
        src = src[read..];
/// <summary>
/// Returns the MD5 of the string's UTF-8 bytes as 32 lowercase hex characters.
/// </summary>
public static string ToMd5(this string stringToHash)
{
    // BUG FIX: the original never disposed the IncrementalHash, leaking the
    // underlying hash handle on every call.
    using (var md5 = IncrementalHash.CreateHash(HashAlgorithmName.MD5))
    {
        md5.AppendData(Encoding.UTF8.GetBytes(stringToHash));

        var stringBuilder = new StringBuilder(32);
        foreach (byte b in md5.GetHashAndReset())
        {
            stringBuilder.Append(b.ToString("x2"));
        }
        return stringBuilder.ToString();
    }
}
// Signs via SignData, then independently hashes the same bytes and checks the
// signature through the VerifyHash path.
public void SignDataVerifyHash_SHA1(DSASignatureFormat signatureFormat)
{
    HashAlgorithmName hashAlgorithm = HashAlgorithmName.SHA1;
    KeyDescription key = GetKey();

    byte[] signature = SignData(key, _typeNameBytes, hashAlgorithm, signatureFormat);
    CheckLength(key, signature, signatureFormat);

    byte[] digest;
    using (IncrementalHash hasher = IncrementalHash.CreateHash(hashAlgorithm))
    {
        hasher.AppendData(_typeNameBytes);
        digest = hasher.GetHashAndReset();
    }

    Assert.True(VerifyHash(key, digest, signature, signatureFormat));
}
/// <summary>
/// Compute a hash of the bytes of the buffer within the frames of the given NetMQMessage.
/// </summary>
/// <param name="hash">the hashing-algorithm to employ</param>
/// <param name="message">the NetMQMessage whose frames are to be hashed</param>
private static byte[] Hash(IncrementalHash hash, NetMQMessage message)
{
    // Feed every frame's buffer into the running hash, then finalize.
    foreach (var frame in message)
    {
        hash.AppendData(frame.ToByteArray(true));
    }

    return hash.GetHashAndReset();
}
// Computes HMAC-SHA256 — H((K ^ opad) || H((K ^ ipad) || message)) — over the
// sequence into destination and returns the outer TryGetHashAndReset result.
// Throws if destination cannot hold the 32-byte digest.
// NOTE(review): _blockLength, _ipad, _opad, Sha2_256 and BytesOperations are
// declared elsewhere; _blockLength is presumably 64 (the SHA-256 block size)
// and the stackalloc buffers presumably zero-initialized (no SkipLocalsInit
// in sight) — confirm both.
public static bool TryComputeHash(ReadOnlySequence<byte> sequence, ReadOnlySpan<byte> key, Span<byte> destination)
{
    if (destination.Length < 32)
    {
        throw new ArgumentOutOfRangeException(nameof(destination));
    }

    // RFC 2104 key normalization: hash keys longer than one block; copy
    // shorter keys into the block-sized buffer.
    Span<byte> extendedKey = stackalloc byte[_blockLength];
    if (key.Length > _blockLength)
    {
        Sha2_256.TryComputeHash(key, extendedKey);
    }
    else
    {
        BytesOperations.Copy(key, extendedKey, Math.Min(key.Length, extendedKey.Length));
    }

    Span<byte> ixor = stackalloc byte[_blockLength];
    BytesOperations.Xor(_ipad, extendedKey, ixor);

    Span<byte> oxor = stackalloc byte[_blockLength];
    BytesOperations.Xor(_opad, extendedKey, oxor);

    // Inner hash: H((K ^ ipad) || message), streamed one segment at a time so
    // the sequence is never materialized.
    Span<byte> ihash = stackalloc byte[32];
    using (var incrementalHash = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
    {
        incrementalHash.AppendData(ixor);
        foreach (var segment in sequence)
        {
            incrementalHash.AppendData(segment.Span);
        }
        incrementalHash.TryGetHashAndReset(ihash, out _);
    }

    // Outer hash: H((K ^ opad) || innerDigest).
    using (var incrementalHash = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
    {
        incrementalHash.AppendData(oxor);
        incrementalHash.AppendData(ihash);
        return(incrementalHash.TryGetHashAndReset(destination, out _));
    }
}
/// <summary>
/// Hashes the byte array with the algorithm named by <c>algorithmName</c>.
/// </summary>
/// <param name="array">The bytes to hash.</param>
/// <returns>The computed hash.</returns>
public virtual byte[] Encrypt(byte[] array)
{
#if NETSTANDARD2_0
    // On netstandard2.0 the name is mapped through HashAlgorithmName and
    // hashed via IncrementalHash.
    var algorithm = new HashAlgorithmName(algorithmName);
    using (var hasher = IncrementalHash.CreateHash(algorithm))
    {
        hasher.AppendData(array);
        return(hasher.GetHashAndReset());
    }
#else
    // Elsewhere, resolve the algorithm by name through the factory.
    using (var algorithm = HashAlgorithm.Create(algorithmName))
    {
        return(algorithm.ComputeHash(array, 0, array.Length));
    }
#endif
}
/// <inheritdoc />
public string Generate(IEnumerable<string> elements)
{
    ArgumentGuard.NotNull(elements, nameof(elements));

    using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.MD5);

    // Each element is followed by the separator bytes before being folded
    // into the hash.
    foreach (string element in elements)
    {
        hasher.AppendData(Encoding.UTF8.GetBytes(element));
        hasher.AppendData(Separator);
    }

    return ByteArrayToHex(hasher.GetHashAndReset());
}
// Releases the managed hash instance (and, on DOTNET5_4 builds, the native
// MD5 handle as well); safe to call more than once.
public void Dispose()
{
    if (this.hash != null)
    {
        this.hash.Dispose();
        // Null out so a second Dispose is a no-op.
        this.hash = null;
    }
#if DOTNET5_4
    if (this.nativeMd5 != null)
    {
        this.nativeMd5.Dispose();
        this.nativeMd5 = null;
    }
#endif
}
// Repeated zero-length appends must not perturb the running state: the final
// digest must equal the reference hash of an empty buffer.
public static void VerifyEmptyHash(HashAlgorithm referenceAlgorithm, HashAlgorithmName hashAlgorithm)
{
    using (referenceAlgorithm)
    using (IncrementalHash incremental = IncrementalHash.CreateHash(hashAlgorithm))
    {
        for (int attempt = 0; attempt < 10; attempt++)
        {
            incremental.AppendData(Array.Empty<byte>());
        }

        Assert.Equal(
            referenceAlgorithm.ComputeHash(Array.Empty<byte>()),
            incremental.GetHashAndReset());
    }
}
// Returns the MD5 of the source string's UTF-8 bytes as 32 uppercase hex
// characters.
public static string GetMd5(string source)
{
    using (var md5 = IncrementalHash.CreateHash(HashAlgorithmName.MD5))
    {
        md5.AppendData(Encoding.UTF8.GetBytes(source));

        var hex = new StringBuilder(32);
        foreach (byte b in md5.GetHashAndReset())
        {
            hex.Append(b.ToString("X2"));
        }
        return hex.ToString();
    }
}
// The array-offset and Memory-slice AppendData overloads must produce the same
// digest for the same prefix of a large random buffer.
public void IncrementalHashLargeBuffer()
{
    var buffer = new byte[12345];
    new Random().NextBytes(buffer);

    // BUG FIX: the original never disposed the IncrementalHash, leaking the
    // underlying hash handle.
    using (var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA1))
    {
        hasher.AppendData(buffer, 0, 11345);
        var hash = hasher.GetHashAndReset();

        hasher.AppendData(new Memory<byte>(buffer).Slice(0, 11345));
        byte[] destination = hasher.GetHashAndReset();

        Assert.IsTrue(hash.AsSpan().SequenceEqual(destination.AsSpan()));
    }
}
// Exercises VerifyBounds (defined elsewhere) against the three finalization
// shapes — allocating GetHashAndReset, span-writing GetHashAndReset(dest),
// and TryGetHashAndReset — using the reference hash of empty input as the
// expected digest.
public static void VerifyBounds_GetHashAndReset_Hash(HashAlgorithm referenceAlgorithm, HashAlgorithmName hashAlgorithm)
{
    using (referenceAlgorithm)
    using (IncrementalHash incremental = IncrementalHash.CreateHash(hashAlgorithm))
    {
        byte[] comparison = referenceAlgorithm.ComputeHash(Array.Empty<byte>());

        // The explicitly-typed last lambda is required: it binds to a custom
        // delegate type with an out parameter.
        VerifyBounds(
            comparison,
            incremental,
            inc => inc.GetHashAndReset(),
            (inc, dest) => inc.GetHashAndReset(dest),
            (IncrementalHash inc, Span<byte> dest, out int bytesWritten) => inc.TryGetHashAndReset(dest, out bytesWritten));
    }
}
// PBKDF2-HMAC-SHA256 (RFC 2898 §5.2): derives derivedKeyLength bytes into
// derivedKey over iterationCount rounds.
// NOTE(review): the PRF key is taken from mac.Key — the caller presumably
// keyed the HMACSHA256 with the password; the `password` parameter itself is
// not read here. Confirm against callers.
private static void PBKDF2_SHA256(HMACSHA256 mac, byte[] password, byte[] salt, int saltLength, long iterationCount, byte[] derivedKey, int derivedKeyLength)
{
    TR.Enter();
    // PBKDF2 caps the output at (2^32 - 1) blocks of hLen (32) bytes.
    if (derivedKeyLength > (Math.Pow(2, 32) - 1) * 32)
    {
        TR.Exit();
        throw new ArgumentException("Requested key length too long");
    }

    var U = new byte[32];                        // U_j: latest PRF output
    var T = new byte[32];                        // T_i: running XOR of the U_j
    var saltBuffer = new byte[saltLength + 4];   // salt || INT(i)
    var blockCount = (int)Math.Ceiling((double)derivedKeyLength / 32);
    var r = derivedKeyLength - (blockCount - 1) * 32;   // bytes taken from the final block

    Buffer.BlockCopy(salt, 0, saltBuffer, 0, saltLength);

    using (var incrementalHasher = IncrementalHash.CreateHMAC(HashAlgorithmName.SHA256, mac.Key))
    {
        for (int i = 1; i <= blockCount; i++)
        {
            // INT(i): the 4-byte big-endian block index appended to the salt.
            saltBuffer[saltLength + 0] = (byte)(i >> 24);
            saltBuffer[saltLength + 1] = (byte)(i >> 16);
            saltBuffer[saltLength + 2] = (byte)(i >> 8);
            saltBuffer[saltLength + 3] = (byte)(i);
            // NOTE(review): mac.Initialize() looks vestigial — all hashing below
            // goes through incrementalHasher, not mac. Confirm before removing.
            mac.Initialize();
            // U_1 = PRF(key, salt || INT(i)); T_i starts as U_1.
            incrementalHasher.AppendData(saltBuffer, 0, saltBuffer.Length);
            Buffer.BlockCopy(incrementalHasher.GetHashAndReset(), 0, U, 0, U.Length);
            Buffer.BlockCopy(U, 0, T, 0, 32);
            // U_j = PRF(key, U_{j-1}); T_i ^= U_j for the remaining iterations.
            for (long j = 1; j < iterationCount; j++)
            {
                incrementalHasher.AppendData(U, 0, U.Length);
                Buffer.BlockCopy(incrementalHasher.GetHashAndReset(), 0, U, 0, U.Length);
                for (int k = 0; k < 32; k++)
                {
                    T[k] ^= U[k];
                }
            }
            // The last block may be partial (r bytes).
            Buffer.BlockCopy(T, 0, derivedKey, (i - 1) * 32, (i == blockCount ? r : 32));
        }
    }
    TR.Exit();
}
/// <summary>
/// ownerKey, documentID must be setuped
/// </summary>
/// <remarks>
/// Derives the global encryption key (Mkey) per the PDF standard security
/// handler: MD5 over userPad || ownerKey || permissions (little-endian) ||
/// documentId [|| MetadataPad when metadata is unencrypted], then 50 extra
/// MD5 rounds for the 128-bit revisions.
/// </remarks>
private void setupGlobalEncryptionKey(byte[] documentId, byte[] userPad, byte[] ownerKey, int permissions)
{
    DocumentId = documentId;
    OwnerKey = ownerKey;
    Permissions = permissions;
    // use variable keylength
    Mkey = new byte[_keyLength / 8];

    byte[] digest = new byte[Mkey.Length];

    //fixed by ujihara in order to follow PDF refrence
    // BUG FIX: the IncrementalHash was never disposed; scope it with `using`.
    using (var md5 = IncrementalHash.CreateHash(HashAlgorithmName.MD5))
    {
        md5.AppendData(userPad, 0, userPad.Length);
        md5.AppendData(ownerKey, 0, ownerKey.Length);

        // Permissions are hashed as 4 little-endian bytes.
        byte[] ext = new byte[4];
        ext[0] = (byte)permissions;
        ext[1] = (byte)(permissions >> 8);
        ext[2] = (byte)(permissions >> 16);
        ext[3] = (byte)(permissions >> 24);
        md5.AppendData(ext, 0, 4);

        if (documentId != null)
        {
            md5.AppendData(documentId, 0, documentId.Length);
        }
        if (!_encryptMetadata)
        {
            md5.AppendData(MetadataPad, 0, MetadataPad.Length);
        }

        // only use the really needed bits as input for the hash
        Array.Copy(md5.GetHashAndReset(), 0, digest, 0, Mkey.Length);
    }

    // 128-bit revisions: re-hash the truncated digest 50 more times.
    if (_revision == STANDARD_ENCRYPTION_128 || _revision == AES_128)
    {
        for (int k = 0; k < 50; ++k)
        {
            using (var md5Hash = MD5.Create())
            {
                Array.Copy(md5Hash.ComputeHash(digest), 0, digest, 0, Mkey.Length);
            }
        }
    }

    Array.Copy(digest, 0, Mkey, 0, Mkey.Length);
}
/// <summary>
/// Computes an MD5 fingerprint of a directory tree from each file's full path,
/// size, and last-write time (file contents are not read). Returns the hash of
/// an all-zero 16-byte block when the directory contains no files.
/// </summary>
private string GetDirectoryHash(string directory)
{
    // BUG FIX: materialize and sort the listing once. The original kept a lazy
    // enumerable and evaluated it twice (hash loop + trailing Any()), which
    // re-walks the whole tree and can observe a different file set if the
    // directory changes between the two enumerations.
    var files = Directory.EnumerateFiles(directory, "*.*", SearchOption.AllDirectories)
        .OrderBy(x => x)
        .ToList();

    using (IncrementalHash md5 = IncrementalHash.CreateHash(HashAlgorithmName.MD5))
    {
        foreach (var f in files)
        {
            FileInfo info = new FileInfo(f);
            md5.AppendData(Encoding.UTF8.GetBytes(info.FullName));              //path
            md5.AppendData(BitConverter.GetBytes(info.Length));                 //size
            md5.AppendData(BitConverter.GetBytes(info.LastWriteTimeUtc.Ticks)); //last written
        }

        //Enforce empty hash string if no files
        return (files.Count == 0 ? new byte[16] : md5.GetHashAndReset()).ToMD5String();
    }
}
// When logging is enabled, allocates the log buffer and the SHA-1 hasher;
// otherwise leaves both null.
internal PdbLogger(bool logging)
{
    _logging = logging;

    if (!logging)
    {
        _logData = null;
        _incrementalHash = null;
        return;
    }

    // do not get this from pool:
    // we need a fairly large buffer here (where the pool typically contains
    // small ones) and we need just one per compile session, so pooling would
    // be counter-productive in this scenario
    _logData = new BlobBuilder(bufferFlushLimit);
    _incrementalHash = IncrementalHash.CreateHash(HashAlgorithmName.SHA1);
}