예제 #1
0
 /// <summary>
 /// Creates a FastRandomGenerator driven by an externally supplied
 /// <see cref="SafeRandomGenerator"/> entropy source. The source is not owned
 /// exclusively by this instance (_safeRandomGeneratorIsMineExclusively = false),
 /// so the caller presumably remains responsible for its lifetime — confirm
 /// against the Dispose logic elsewhere in this class.
 /// </summary>
 /// <param name="safeRandomGenerator">Entropy source used for (re)seeding.</param>
 /// <param name="digest">Digest that drives the internal DigestRandomGenerator;
 /// its size also determines <c>SeedSize</c>.</param>
 /// <exception cref="ArgumentNullException">Either argument is null.</exception>
 public FastRandomGenerator(SafeRandomGenerator safeRandomGenerator, IDigest digest)
 {
     // Fail fast with a clear ArgumentNullException instead of a late
     // NullReferenceException inside DigestRandomGenerator / Reseed().
     // This also matches the validation done by the internal constructor.
     if (safeRandomGenerator == null)
     {
         throw new ArgumentNullException(nameof(safeRandomGenerator));
     }
     if (digest == null)
     {
         throw new ArgumentNullException(nameof(digest));
     }
     _safeRandomGenerator = safeRandomGenerator;
     _safeRandomGeneratorIsMineExclusively = false;
     _myPrng     = new DigestRandomGenerator(digest);
     _digestSize = digest.GetDigestSize();
     SeedSize    = _digestSize;
     Reseed();
 }
예제 #2
0
 /// <summary>
 /// Creates a FastRandomGenerator that builds and exclusively owns its own
 /// <see cref="SafeRandomGenerator"/> from the supplied entropy hashers
 /// (_safeRandomGeneratorIsMineExclusively = true).
 /// </summary>
 /// <param name="entropyHashers">Entropy hashers used to construct the internal
 /// SafeRandomGenerator.</param>
 /// <param name="digest">Digest that drives the internal DigestRandomGenerator;
 /// its size also determines <c>SeedSize</c>.</param>
 /// <exception cref="ArgumentNullException">Either argument is null.</exception>
 public FastRandomGenerator(List <IEntropyHasher> entropyHashers, IDigest digest)
 {
     // Validate before constructing the SafeRandomGenerator so a null digest
     // cannot leak a freshly created (owned) entropy source.
     if (entropyHashers == null)
     {
         throw new ArgumentNullException(nameof(entropyHashers));
     }
     if (digest == null)
     {
         throw new ArgumentNullException(nameof(digest));
     }
     _safeRandomGenerator = new SafeRandomGenerator(entropyHashers);
     _safeRandomGeneratorIsMineExclusively = true;
     _myPrng     = new DigestRandomGenerator(digest);
     _digestSize = digest.GetDigestSize();
     SeedSize    = _digestSize;
     Reseed();
 }
예제 #3
0
        /* BouncyCastle DigestRandomGenerator Analysis
         * BouncyCastle DigestRandomGenerator maintains two separate but related internal states, represented by the following:
         *     byte[] seed
         *     long   seedCounter
         *     byte[] state
         *     long   stateCounter
         * The size of seed and state are both equal to the size of the digest.  I am going to refer to the digest size, in bits,
         * as "M".  The counters are obviously 64 bits each.
         *
         * In order to generate repeated output, there would need to be a collision of stateCounter, state, and seed.  We expect a seed
         * collision every 2^(M/2) times that we cycle seed.  We expect a state collision every 2^(M/2) times that we GenerateState,
         * and stateCounter will repeat itself every 2^64 times that we call GenerateState.  This means we can never have a repeated
         * stateCounter&state&seed in less than 2^64 calls to GenerateState, and very likely, it would be much much larger than that.
         *
         * GenerateState is called at least once for every call to NextBytes, and it's called more times, if the number of bytes requested
         * >= digest size in bytes.  We can easily measure the number of calls to GenerateState, by counting 1+(bytes.Length/digest.Size),
         * and we want to ensure this number is always below 2^64, which is UInt64.MaxValue
         *
         * bytes.Length is an Int32.  We can easily guarantee we'll never repeat an internal state, if we use a UInt64 to tally the
         * number of calls to GenerateState, and require new seed material before UInt64.MaxValue - Int32.MaxValue.  This is a huge number.
         *
         * To put this in perspective, supposing a 128 bit digest, and supposing the user on average requests 8 bytes per call to NextBytes.
         * Then there is guaranteed to be no repeat state before 147 quintillion bytes (147 billion billion).  So let's just tone this
         * down a bit, and choose thresholds that are way more conservative.
         *
         * Completely unrelated to analysis of DigestRandomGenerator, some other PRNGs (Fortuna) recommend new seed material in 2^20
         * iterations, due to limitations they have, which we don't have.  So let's just ensure we end up choosing thresholds that are down
         * on-par with that level, even though completely unnecessary for us, it will feel conservative and safe.
         *
         * Let's use a plain old int to tally the number of calls to GenerateState.  We need to ensure we never overflow this counter, so
         * let's assume all digests are at least 4 bytes, and let's require new seed material every int.MaxValue/2.  This is basically
         * 1 billion calls to NextBytes, so a few GB of random data or so.  Extremely safe and conservative.
         *
         * But let's squish it down even more than that.  FastRandomGenerator performs approx 1,000 times faster than SafeRandomGenerator.  So to
         * maximize the sweet spot between strong security and good performance, let's only stretch the entropy 1,000,000 times at hard
         * maximum, and 64,000 times softly suggested.  Typically, for example with Sha256, this means we'll generate up to 2MB before
         * requesting reseed, and up to 32MB before requiring reseed.
         *
         * Now we're super duper conservative, being zillions of times more conservative than necessary, maximally conservative to the point
         * where we do not take an appreciable performance degradation.
         */

        /// <summary>
        /// Default constructor. Creates and exclusively owns a new
        /// <see cref="SafeRandomGenerator"/> as the entropy source, and stretches
        /// its output through a SHA-512 based DigestRandomGenerator.
        /// </summary>
        public FastRandomGenerator()
        {
            // SHA-512 is the default digest; its size fixes both the internal
            // digest size and the exposed SeedSize.
            IDigest sha512 = new Sha512Digest();

            _safeRandomGenerator = new SafeRandomGenerator();
            _safeRandomGeneratorIsMineExclusively = true;
            _myPrng = new DigestRandomGenerator(sha512);
            SeedSize = _digestSize = sha512.GetDigestSize();

            // Pull initial seed material before the instance is handed out.
            Reseed();
        }
 /// <summary>
 /// Core constructor shared by the public overloads: wires the entropy source,
 /// records ownership, builds the digest-driven PRNG, and performs the initial
 /// reseed.
 /// </summary>
 /// <param name="safeRng">Entropy source used for (re)seeding.</param>
 /// <param name="ownsSafeRng">True when this instance exclusively owns
 /// <paramref name="safeRng"/>.</param>
 /// <param name="digest">Digest that drives the internal DigestRandomGenerator;
 /// its size also determines <c>SeedSize</c>.</param>
 /// <exception cref="ArgumentNullException">
 /// <paramref name="digest"/> or <paramref name="safeRng"/> is null.
 /// </exception>
 internal FastRandomGenerator(
     SafeRandomGenerator safeRng,
     bool ownsSafeRng,
     IDigest digest)
 {
     // Guard clauses; digest is checked first to keep the original
     // parameter-validation order.
     if (digest == null)
     {
         throw new ArgumentNullException(nameof(digest));
     }
     if (safeRng == null)
     {
         throw new ArgumentNullException(nameof(safeRng));
     }

     _safeRandomGenerator     = safeRng;
     _ownsSafeRandomGenerator = ownsSafeRng;
     _prng       = new DigestRandomGenerator(digest);
     _digestSize = digest.GetDigestSize();
     SeedSize    = _digestSize;
     Reseed();
 }
 /// <summary>
 /// Creates a FastRandomGenerator over an externally supplied entropy source
 /// and digest. Ownership of <paramref name="safeRandomGenerator"/> is NOT
 /// taken (ownsSafeRng is false in the delegated call).
 /// </summary>
 /// <param name="safeRandomGenerator">Entropy source used for (re)seeding.</param>
 /// <param name="digest">Digest that drives the internal PRNG.</param>
 public FastRandomGenerator(SafeRandomGenerator safeRandomGenerator, IDigest digest)
     : this(safeRandomGenerator, false /* ownsSafeRng */, digest)
 {
 }
 /// <summary>
 /// Creates a FastRandomGenerator over an externally supplied entropy source,
 /// defaulting the digest to SHA-512. Ownership of
 /// <paramref name="safeRandomGenerator"/> is NOT taken (ownsSafeRng is false
 /// in the delegated call).
 /// </summary>
 /// <param name="safeRandomGenerator">Entropy source used for (re)seeding.</param>
 public FastRandomGenerator(SafeRandomGenerator safeRandomGenerator)
     : this(safeRandomGenerator, false /* ownsSafeRng */, new Sha512Digest())
 {
 }