public override bool VerifySignature(byte[] hash, byte[] signature)
{
    if (hash == null)
    {
        throw new ArgumentNullException(nameof(hash));
    }

    if (signature == null)
    {
        throw new ArgumentNullException(nameof(signature));
    }

    SafeDsaHandle key = _key.Value;
    int expectedSignatureBytes = Interop.Crypto.DsaSignatureFieldSize(key) * 2;

    if (signature.Length != expectedSignatureBytes)
    {
        // The input isn't the fixed width that the IEEE P1363 format (r || s, no DER framing)
        // requires for this key, so we can't sensibly re-encode it as DER.
        return false;
    }

    byte[] openSslFormat = OpenSslAsymmetricAlgorithmCore.ConvertIeee1363ToDer(signature);

    return Interop.Crypto.DsaVerify(key, hash, hash.Length, openSslFormat, openSslFormat.Length);
}
public override bool VerifyHash(byte[] hash, byte[] signature)
{
    if (hash == null)
    {
        throw new ArgumentNullException(nameof(hash));
    }

    if (signature == null)
    {
        throw new ArgumentNullException(nameof(signature));
    }

    // The signature format for .NET is r.Concat(s). Each of r and s is of length BitsToBytes(KeySize),
    // even when they would have leading zeroes. If it's the correct size, then we need to re-encode it
    // from r.Concat(s) to SEQUENCE(INTEGER(r), INTEGER(s)), because that's the format OpenSSL expects.
    int expectedBytes = 2 * OpenSslAsymmetricAlgorithmCore.BitsToBytes(KeySize);

    if (signature.Length != expectedBytes)
    {
        // The input isn't of the right length, so we can't sensibly re-encode it.
        return false;
    }

    byte[] openSslFormat = OpenSslAsymmetricAlgorithmCore.ConvertIeee1363ToDer(signature);
    SafeEcKeyHandle key = _key.Value;
    int verifyResult = Interop.Crypto.EcDsaVerify(hash, hash.Length, openSslFormat, openSslFormat.Length, key);

    return verifyResult == 1;
}
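
// For reference, a minimal sketch of the IEEE P1363 -> DER re-encoding that both methods above
// delegate to OpenSslAsymmetricAlgorithmCore.ConvertIeee1363ToDer. The names below
// (Ieee1363ToDerSketch, EncodeDerInteger, WrapInSequence) are illustrative only, not the real
// implementation. The sketch assumes short-form DER lengths (encoded content <= 127 bytes),
// which holds for DSA and for EC curves up to roughly P-384.
private static byte[] Ieee1363ToDerSketch(byte[] signature)
{
    // IEEE P1363 is r || s with each half padded to the full field width.
    int fieldSize = signature.Length / 2;

    byte[] r = EncodeDerInteger(signature, 0, fieldSize);
    byte[] s = EncodeDerInteger(signature, fieldSize, fieldSize);

    return WrapInSequence(r, s);
}

private static byte[] EncodeDerInteger(byte[] buffer, int offset, int count)
{
    // DER INTEGERs use a minimal encoding, so strip the fixed-width padding zeroes.
    while (count > 1 && buffer[offset] == 0)
    {
        offset++;
        count--;
    }

    // If the high bit is set, prepend 0x00 so the value isn't read as negative.
    bool needsPadding = (buffer[offset] & 0x80) != 0;
    int contentLength = count + (needsPadding ? 1 : 0);

    byte[] encoded = new byte[2 + contentLength];
    encoded[0] = 0x02;                // INTEGER tag
    encoded[1] = (byte)contentLength; // short-form length (assumed sufficient here)

    if (needsPadding)
    {
        encoded[2] = 0x00;
    }

    Buffer.BlockCopy(buffer, offset, encoded, 2 + (needsPadding ? 1 : 0), count);
    return encoded;
}

private static byte[] WrapInSequence(byte[] r, byte[] s)
{
    int contentLength = r.Length + s.Length;

    byte[] encoded = new byte[2 + contentLength];
    encoded[0] = 0x30;                // SEQUENCE tag
    encoded[1] = (byte)contentLength; // short-form length (assumed sufficient here)

    Buffer.BlockCopy(r, 0, encoded, 2, r.Length);
    Buffer.BlockCopy(s, 0, encoded, 2 + r.Length, s.Length);
    return encoded;
}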