/// <summary>
/// Verifies an IEEE 1363 formatted (r || s) DSA signature against a precomputed hash.
/// </summary>
/// <param name="hash">The hash value that was signed.</param>
/// <param name="signature">The signature in IEEE 1363 (fixed-field) format.</param>
/// <returns><c>true</c> when the signature is valid for the hash; otherwise <c>false</c>.</returns>
/// <exception cref="ArgumentNullException"><paramref name="hash"/> or <paramref name="signature"/> is null.</exception>
public override bool VerifySignature(byte[] hash, byte[] signature)
{
    if (hash == null)
    {
        throw new ArgumentNullException(nameof(hash));
    }

    if (signature == null)
    {
        throw new ArgumentNullException(nameof(signature));
    }

    SafeDsaHandle dsaKey = _key.Value;

    // IEEE 1363 encodes r and s as two fixed-width fields, so the only valid
    // length is exactly twice the field size.
    int requiredLength = Interop.Crypto.DsaSignatureFieldSize(dsaKey) * 2;

    if (signature.Length != requiredLength)
    {
        // The input isn't of the right length (assuming no DER), so we can't sensibly re-encode it with DER.
        return false;
    }

    // OpenSSL expects SEQUENCE(INTEGER(r), INTEGER(s)), so re-encode before verifying.
    byte[] derSignature = OpenSslAsymmetricAlgorithmCore.ConvertIeee1363ToDer(signature);

    return Interop.Crypto.DsaVerify(dsaKey, hash, hash.Length, derSignature, derSignature.Length);
}
/// <summary>
/// Produces a DSA signature over a precomputed hash, returned in IEEE 1363 (r || s) format.
/// </summary>
/// <param name="hash">The hash value to sign.</param>
/// <returns>The signature as two fixed-width fields (r followed by s).</returns>
/// <exception cref="ArgumentNullException"><paramref name="hash"/> is null.</exception>
/// <exception cref="CryptographicException">The OpenSSL signing operation failed.</exception>
public override byte[] CreateSignature(byte[] hash)
{
    if (hash == null)
    {
        throw new ArgumentNullException(nameof(hash));
    }

    SafeDsaHandle dsaKey = _key.Value;

    // OpenSSL writes a DER-encoded signature; allocate the maximum size it reports.
    byte[] derSignature = new byte[Interop.Crypto.DsaEncodedSignatureSize(dsaKey)];
    int bytesWritten;

    if (!Interop.Crypto.DsaSign(dsaKey, hash, hash.Length, derSignature, out bytesWritten))
    {
        throw Interop.Crypto.CreateOpenSslCryptographicException();
    }

    Debug.Assert(
        bytesWritten <= derSignature.Length,
        "DSA_sign reported an unexpected signature size",
        "DSA_sign reported signatureSize was {0}, when <= {1} was expected",
        bytesWritten,
        derSignature.Length);

    // ConvertDerToIeee1363 takes the field size in bits.
    int fieldSizeBits = Interop.Crypto.DsaSignatureFieldSize(dsaKey) * BitsPerByte;

    return OpenSslAsymmetricAlgorithmCore.ConvertDerToIeee1363(derSignature, 0, bytesWritten, fieldSizeBits);
}
/// <summary>
/// Verifies an IEEE 1363 formatted (r || s) ECDSA signature against a precomputed hash.
/// </summary>
/// <param name="hash">The hash value that was signed.</param>
/// <param name="signature">The signature in IEEE 1363 (fixed-field) format.</param>
/// <returns><c>true</c> when the signature is valid for the hash; otherwise <c>false</c>.</returns>
/// <exception cref="ArgumentNullException"><paramref name="hash"/> or <paramref name="signature"/> is null.</exception>
public override bool VerifyHash(byte[] hash, byte[] signature)
{
    if (hash == null)
    {
        throw new ArgumentNullException(nameof(hash));
    }

    if (signature == null)
    {
        throw new ArgumentNullException(nameof(signature));
    }

    // The signature format for .NET is r.Concat(s). Each of r and s are of length BitsToBytes(KeySize), even
    // when they would have leading zeroes. If it's the correct size, then we need to encode it from
    // r.Concat(s) to SEQUENCE(INTEGER(r), INTEGER(s)), because that's the format that OpenSSL expects.
    int ieee1363Length = 2 * OpenSslAsymmetricAlgorithmCore.BitsToBytes(KeySize);

    if (signature.Length != ieee1363Length)
    {
        // The input isn't of the right length, so we can't sensibly re-encode it.
        return false;
    }

    byte[] derEncoded = OpenSslAsymmetricAlgorithmCore.ConvertIeee1363ToDer(signature);
    SafeEcKeyHandle ecKey = _key.Value;

    // ECDSA_verify returns 1 on success, 0 on a bad signature, -1 on error.
    return Interop.Crypto.EcDsaVerify(hash, hash.Length, derEncoded, derEncoded.Length, ecKey) == 1;
}
/// <summary>
/// Hashes the specified slice of <paramref name="data"/> with the requested algorithm.
/// </summary>
/// <param name="data">The buffer containing the data to hash.</param>
/// <param name="offset">The index of the first byte to hash.</param>
/// <param name="count">The number of bytes to hash, starting at <paramref name="offset"/>.</param>
/// <param name="hashAlgorithm">The algorithm to hash with.</param>
/// <returns>The computed hash value.</returns>
protected override byte[] HashData(byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm)
{
    // we're sealed and the base should have checked this already
    Debug.Assert(data != null);
    Debug.Assert(offset >= 0 && offset <= data.Length);
    // Bound count by the space remaining after offset. The previous check
    // (count <= data.Length) still allowed offset + count to run past the
    // end of the buffer when both values were individually in range.
    Debug.Assert(count >= 0 && count <= data.Length - offset);
    Debug.Assert(!string.IsNullOrEmpty(hashAlgorithm.Name));

    return OpenSslAsymmetricAlgorithmCore.HashData(data, offset, count, hashAlgorithm);
}
/// <summary>
/// Produces an ECDSA signature over a precomputed hash, returned in IEEE 1363 (r || s) format.
/// </summary>
/// <param name="hash">The hash value to sign.</param>
/// <returns>The signature as two fixed-width fields (r followed by s).</returns>
/// <exception cref="ArgumentNullException"><paramref name="hash"/> is null.</exception>
/// <exception cref="CryptographicException">The OpenSSL signing operation failed.</exception>
public override byte[] SignHash(byte[] hash)
{
    if (hash == null)
    {
        throw new ArgumentNullException(nameof(hash));
    }

    SafeEcKeyHandle ecKey = _key.Value;

    // OpenSSL writes a DER-encoded signature; ECDSA_size gives its maximum length,
    // and EcDsaSign updates derLength to the number of bytes actually written.
    int derLength = Interop.Crypto.EcDsaSize(ecKey);
    byte[] derSignature = new byte[derLength];

    if (!Interop.Crypto.EcDsaSign(hash, hash.Length, derSignature, ref derLength, ecKey))
    {
        throw Interop.Crypto.CreateOpenSslCryptographicException();
    }

    // Re-encode from SEQUENCE(INTEGER(r), INTEGER(s)) to the fixed-field .NET format.
    return OpenSslAsymmetricAlgorithmCore.ConvertDerToIeee1363(derSignature, 0, derLength, KeySize);
}
/// <summary>Hashes the contents of a stream using the specified algorithm.</summary>
/// <param name="data">The stream to read and hash.</param>
/// <param name="hashAlgorithm">The algorithm to hash with.</param>
/// <returns>The computed hash value.</returns>
protected override byte[] HashData(Stream data, HashAlgorithmName hashAlgorithm) =>
    OpenSslAsymmetricAlgorithmCore.HashData(data, hashAlgorithm);
/// <summary>Hashes the specified slice of a buffer using the specified algorithm.</summary>
/// <param name="data">The buffer containing the data to hash.</param>
/// <param name="offset">The index of the first byte to hash.</param>
/// <param name="count">The number of bytes to hash, starting at <paramref name="offset"/>.</param>
/// <param name="hashAlgorithm">The algorithm to hash with.</param>
/// <returns>The computed hash value.</returns>
protected override byte[] HashData(byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm) =>
    OpenSslAsymmetricAlgorithmCore.HashData(data, offset, count, hashAlgorithm);