/// <summary>
/// Derives an address from the given seed, performing all work on the CPU.
/// </summary>
/// <param name="seed">Seed bytes hashed (with a zeroed 4-byte index) to derive the secret key.</param>
/// <returns>The encoded address string, including the configured address prefix.</returns>
public static string TestCpu(byte[] seed)
{
    byte[] secret = new byte[32];
    byte[] index = new byte[4];          // account index 0 (never modified)
    byte[] publicKey = new byte[32];
    byte[] checksum = new byte[5];
    byte[] scratch = new byte[64];

    Job.AddressBuffer buffer = new(Job.AddressPrefix.Length + 60);
    buffer.Append(Job.AddressPrefix);

    // secret = Blake2b-256(seed || index)
    var hasher = Blake2b.CreateIncrementalHasher(32);
    hasher.Update(seed);
    hasher.Update(index);
    hasher.Finish(secret);

    // Ed25519 public key derived from the secret key.
    Chaos.NaCl.Internal.Ed25519Ref10.Ed25519Operations.crypto_public_key(
        secret, 0, publicKey, 0, scratch);

    // 5-byte Blake2b checksum over the public key, byte-reversed per the address format.
    Blake2b.ComputeAndWriteHash(5, publicKey, checksum);
    Job.Reverse(checksum);

    Job.NanoBase32(publicKey, ref buffer);
    Job.NanoBase32(checksum, ref buffer);

    return buffer.ToString();
}
/// <summary>
/// Returns a cached <see cref="KernelMap{T}"/> for the given parameters, building and caching
/// a new one on a miss. Caching requires the weighting function to expose a stable unique ID;
/// otherwise a fresh map is always built.
/// </summary>
public static KernelMap<T> GetOrAdd(int isize, int osize, InterpolationSettings interpolator, int ichannels, double offset)
{
    // No stable identity for the weighting function means no valid cache key.
    if (interpolator.WeightingFunction is not IUniquelyIdentifiable identifiable)
        return KernelMap<T>.create(isize, osize, interpolator, ichannels, offset);

    // Fold every parameter that affects the kernel into a Guid-sized Blake2b digest.
    var hasher = Blake2b.CreateIncrementalHasher(Unsafe.SizeOf<Guid>());
    hasher.Update(isize);
    hasher.Update(osize);
    hasher.Update(ichannels);
    hasher.Update(offset);
    hasher.Update(identifiable.UniqueID);
    hasher.Update(interpolator.Blur);

    Span<byte> digest = stackalloc byte[Unsafe.SizeOf<Guid>()];
    hasher.Finish(digest);
    var cacheKey = MemoryMarshal.Read<Guid>(digest);

    // TryGet first so a hit avoids eagerly constructing the (expensive) map.
    if (lruCache.TryGet(cacheKey, out var cached))
        return cached;

    return lruCache.GetOrAdd(cacheKey, KernelMap<T>.create(isize, osize, interpolator, ichannels, offset));
}
/// <summary>Constructs a new <see cref="CubicInterpolator" /> with the specified <paramref name="b" /> and <paramref name="c" /> values.</summary>
/// <param name="b">Controls the smoothness of the filter. Larger values smooth/blur more. Values > 1.0 are not recommended.</param>
/// <param name="c">Controls the sharpness of the filter. Larger values sharpen more. Values > 1.0 are not recommended.</param>
public CubicInterpolator(double b = 0.0, double c = 0.5)
{
    if (b < 0.0)
        throw new ArgumentOutOfRangeException(nameof(b), "Value must be greater than or equal to 0");
    if (c < 0.0)
        throw new ArgumentOutOfRangeException(nameof(c), "Value must be greater than or equal to 0");

    // b == c == 0 is the pure Hermite spline, which only needs 1 sample of support.
    support = b == 0.0 && c == 0.0 ? 1.0 : 2.0;

    // Mitchell-Netravali polynomial coefficients (divided through by 6):
    // |x| < 1:  p3*|x|^3 + p2*|x|^2 + p0
    p0 = (6.0 - 2.0 * b) / 6.0;
    p2 = (-18.0 + 12.0 * b + c * 6.0) / 6.0;
    p3 = (12.0 - 9.0 * b - c * 6.0) / 6.0;
    // 1 <= |x| < 2:  q3*|x|^3 + q2*|x|^2 + q1*|x| + q0
    q0 = (8.0 * b + c * 24.0) / 6.0;
    q1 = (-12.0 * b - c * 48.0) / 6.0;
    q2 = (6.0 * b + c * 30.0) / 6.0;
    q3 = (-b - c * 6.0) / 6.0;

    displayString = $"{nameof(CubicInterpolator)}({b}, {c})";

    // Stable identity for kernel caching: hash of type name plus parameters.
    var hasher = Blake2b.CreateIncrementalHasher(Unsafe.SizeOf<Guid>());
    hasher.Update(fullName.AsSpan());
    hasher.Update(b);
    hasher.Update(c);
    uniqueID = hasher.FinalizeToGuid();
}
/// <summary>
/// Since data provided might be massive, it is not recommended to read it all into memory at the same time.
/// For this reason we use Streams and only hash it in chunks
/// </summary>
/// <param name="data">Stream of data to be hashed</param>
/// <param name="digestLength">Optional parameter for digest Length to be used when Hashing. Defaults to 32</param>
/// <param name="bufferToDigestRatio">Optional parameter to define buffer size as a ratio to the Digest Length. Defaults to 128
/// The bigger the bufferToDigestRatio, the more you are sacrificing memory used to gain speed.</param>
/// <returns>Returns Base64Url Encoded String with hashed value</returns>
public static string ComputeBase64Blake2bHashInBuffers(Stream data, int digestLength = 32, int bufferToDigestRatio = 128)
{
    var hasher = Blake2b.CreateIncrementalHasher(digestLength);
    var buffer = ArrayPool<byte>.Shared.Rent(digestLength * bufferToDigestRatio);
    try
    {
        int bytesRead;
        while ((bytesRead = data.Read(buffer, 0, buffer.Length)) > 0)
        {
            hasher.Update(new Span<byte>(buffer, 0, bytesRead));
        }
    }
    finally
    {
        // FIX: previously the rented buffer leaked from the pool if Read or Update threw.
        ArrayPool<byte>.Shared.Return(buffer);
    }

    return WebEncoders.Base64UrlEncode(hasher.Finish());
}
/// <summary>Constructs a new <see cref="LanczosInterpolator" /> with the specified number of <paramref name="lobes" />.</summary>
/// <param name="lobes">Controls the <see cref="Support" /> size of the windowed sinc function. Greater values increase the cost of the resulting filter significantly.</param>
public LanczosInterpolator(int lobes = 3)
{
    if (lobes <= 0)
        throw new ArgumentOutOfRangeException(nameof(lobes), "Value must be greater than 0");

    // Support equals the lobe count; cache its reciprocal for the window evaluation.
    support = lobes;
    isupport = 1.0 / support;

    displayString = $"{nameof(LanczosInterpolator)}({lobes})";

    // Stable identity for kernel caching: hash of type name plus parameter.
    var hasher = Blake2b.CreateIncrementalHasher(Unsafe.SizeOf<Guid>());
    hasher.Update(fullName.AsSpan());
    hasher.Update(lobes);
    uniqueID = hasher.FinalizeToGuid();
}
// Runs the RFC 7693-style Blake2b self-test: hashes a grid of unkeyed and keyed
// digests over generated test sequences and folds them into a single digest,
// which the caller compares against a known-good check value.
private static byte[] blake2bSelfTest()
{
    var accumulator = Blake2b.CreateIncrementalHasher(blake2bCheck.Length);

    int[] digestLengths = { 20, 32, 48, 64 };
    int[] messageLengths = { 0, 3, 128, 129, 255, 1024 };

    foreach (int diglen in digestLengths)
    {
        foreach (int msglen in messageLengths)
        {
            var message = getTestSequence(msglen);
            var key = getTestSequence(diglen);

            accumulator.Update(Blake2b.ComputeHash(diglen, message));
            accumulator.Update(Blake2b.ComputeHash(diglen, key, message));
        }
    }

    return accumulator.Finish();
}
/// <summary>Constructs a new <see cref="GaussianInterpolator" /> with the specified <paramref name="sigma" />.</summary>
/// <param name="sigma">The sigma value (sometimes called radius) for the interpolation function. Larger values produce more blurring.</param>
public GaussianInterpolator(double sigma)
{
    if (sigma <= 0.0)
        throw new ArgumentOutOfRangeException(nameof(sigma), "Value must be greater than 0");

    // Truncate the Gaussian at 3 sigma; precompute the exponent and normalization factors.
    support = sigma * 3.0;
    s0 = 1.0 / (2.0 * sigma * sigma);
    s1 = 1.0 / Sqrt(PI * 2.0 * sigma * sigma);

    displayString = $"{nameof(GaussianInterpolator)}({sigma})";

    // Stable identity for kernel caching: hash of type name plus parameter.
    var hasher = Blake2b.CreateIncrementalHasher(Unsafe.SizeOf<Guid>());
    hasher.Update(fullName.AsSpan());
    hasher.Update(sigma);
    uniqueID = hasher.FinalizeToGuid();
}
/// <summary>Constructs a new <see cref="QuadraticInterpolator" /> with the specified <paramref name="r" /> value.</summary>
/// <param name="r">A value between 0.5 and 1.5, where lower values produce a smoother filter and higher values produce a sharper filter.</param>
public QuadraticInterpolator(double r = 1.0)
{
    if (r < 0.5 || r > 1.5)
        throw new ArgumentOutOfRangeException(nameof(r), "Value must be between 0.5 and 1.5");

    this.r = r;

    // Precomputed quadratic piece coefficients derived from r.
    r0 = -2.0 * r;
    r1 = -2.0 * r - 0.5;
    r2 = 0.5 * (r + 1.0);
    r3 = 0.75 * (r + 1.0);

    displayString = $"{nameof(QuadraticInterpolator)}({r})";

    // Stable identity for kernel caching: hash of type name plus parameter.
    var hasher = Blake2b.CreateIncrementalHasher(Unsafe.SizeOf<Guid>());
    hasher.Update(fullName.AsSpan());
    hasher.Update(r);
    uniqueID = hasher.FinalizeToGuid();
}
// Allocation-free variant of the Blake2b self-test: identical digest grid to
// blake2bSelfTest, but each intermediate digest is written into a stackalloc
// scratch buffer via ComputeAndWriteHash instead of allocating arrays.
private static byte[] blake2bNoAllocSelfTest()
{
    Span<byte> scratch = stackalloc byte[Blake2b.DefaultDigestLength];
    var accumulator = Blake2b.CreateIncrementalHasher(blake2bCheck.Length);

    int[] digestLengths = { 20, 32, 48, 64 };
    int[] messageLengths = { 0, 3, 128, 129, 255, 1024 };

    foreach (int diglen in digestLengths)
    {
        foreach (int msglen in messageLengths)
        {
            var message = getTestSequence(msglen);
            var key = getTestSequence(diglen);

            Blake2b.ComputeAndWriteHash(diglen, message, scratch);
            accumulator.Update(scratch.Slice(0, diglen));

            Blake2b.ComputeAndWriteHash(diglen, key, message, scratch);
            accumulator.Update(scratch.Slice(0, diglen));
        }
    }

    if (accumulator.TryFinish(scratch, out int written))
        return scratch.Slice(0, written).ToArray();

    return Array.Empty<byte>();
}
/// <summary>
/// Worker loop: repeatedly derives an address from a random seed and checks it against
/// the configured keyword, until cancelled or (when no callback is set) a match is found.
/// On a match, either invokes the result callback and keeps searching, or stores the
/// result in FoundSeed/FoundAddress and stops.
/// </summary>
/// <param name="arg">Unused; required by the base-class signature.</param>
public override void Run(object? arg)
{
    // NOTE(review): prefixes use "3" and "1" as the first address character after the
    // prefix — presumably the two valid leading digits of the address format; confirm.
    string prefix1 = string.Concat(AddressPrefix, "3", keyword);
    string prefix2 = string.Concat(AddressPrefix, "1", keyword);
    string suffix1 = keyword;

    // Fixed scratch buffers reused across iterations to avoid per-iteration allocation.
    byte[] seedBytes = new byte[32];
    byte[] secretBytes = new byte[32];
    byte[] indexBytes = new byte[4];       // account index 0; never written
    byte[] publicKeyBytes = new byte[32];
    byte[] checksumBytes = new byte[5];
    byte[] tmp = new byte[64];
    AddressBuffer addressBuffer = new(AddressPrefix.Length + 60);

    // Copy instance state to locals once, before the hot loop.
    bool canMatchPrefix = this.canMatchPrefix;
    bool canMatchSuffix = this.canMatchSuffix;
    CancellationToken cancellationToken = this.cancellationToken;
    System.Action<string, string> resultCallback = this.resultCallback;

    addressBuffer.Append(AddressPrefix);

    while (!cancellationToken.IsCancellationRequested)
    {
        random.GetBytes(seedBytes);

        // secret = Blake2b-256(seed || index)
        var hasher = Blake2b.CreateIncrementalHasher(32);
        hasher.Update(seedBytes);
        hasher.Update(indexBytes);
        hasher.Finish(secretBytes);

        // Ed25519 public key from the derived secret.
        Chaos.NaCl.Internal.Ed25519Ref10.Ed25519Operations.crypto_public_key(
            secretBytes, 0, publicKeyBytes, 0, tmp);

        // 5-byte checksum over the public key, reversed, then both parts base32-encoded
        // into the address buffer after the prefix.
        Blake2b.ComputeAndWriteHash(5, publicKeyBytes, checksumBytes);
        Reverse(checksumBytes);
        NanoBase32(publicKeyBytes, ref addressBuffer);
        NanoBase32(checksumBytes, ref addressBuffer);

        bool isMatched = false;
        if (canMatchPrefix)
        {
            isMatched = addressBuffer.StartsWith(prefix1) || addressBuffer.StartsWith(prefix2);
        }
        if (!isMatched && canMatchSuffix)
        {
            isMatched = addressBuffer.EndsWith(suffix1);
        }

        if (isMatched)
        {
            var address = addressBuffer.ToString();
            if (resultCallback != null)
            {
                // Callback mode: report the match and keep searching for more.
                resultCallback.Invoke(HexUtils.HexFromByteArray(seedBytes), address);
            }
            else
            {
                // Single-result mode: publish the match and stop.
                FoundSeed = HexUtils.HexFromByteArray(seedBytes);
                FoundAddress = address;
                break;
            }
        }

        ++attempts;
        // Rewind the buffer to just the prefix for the next iteration.
        addressBuffer.Length = AddressPrefix.Length;
    }
}
// Verifies that Update rejects a ReadOnlySpan<T> whose element type (KatEntry)
// contains managed references, since such data cannot be safely reinterpreted as bytes.
public void UpdateThrowsOnRefContainingSpanT()
{
    Assert.Throws<NotSupportedException>(() => Blake2b.CreateIncrementalHasher().Update(new ReadOnlySpan<KatEntry>(KatEntry.All)));
}
// Verifies that Update rejects a single value whose type (KatEntry) contains
// managed references, since such data cannot be safely reinterpreted as bytes.
public void UpdateThrowsOnRefContainingT()
{
    Assert.Throws<NotSupportedException>(() => Blake2b.CreateIncrementalHasher().Update(KatEntry.All[0]));
}
// Known-answer test for keyed Blake2b: hashing the entry's data with its key
// must reproduce the entry's expected digest exactly.
public void KatBlake2bKeyed(KatEntry ka)
{
    var actual = compute(Blake2b.CreateIncrementalHasher(ka.Key), ka.Data);
    var expected = ka.Digest;

    Assert.True(actual.SequenceEqual(expected));
}