public void BlockPolicyEstimates()
        {
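            // Exercises the block policy fee estimator: transactions at ten increasing fee levels
            // are fed into the mempool and mined with fee-dependent frequency, and the resulting
            // EstimateFee/EstimateSmartFee values are checked against the expected feerates.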
            var          dateTimeSet = new DateTimeProviderSet();
            NodeSettings settings    = NodeSettings.Default();
            var          mpool       = new TxMempool(DateTimeProvider.Default,
                                                     new BlockPolicyEstimator(new MempoolSettings(settings), settings.LoggerFactory, settings), settings.LoggerFactory, settings);
            var entry    = new TestMemPoolEntryHelper();
            var basefee  = new Money(2000);
            var deltaFee = new Money(100);
            var feeV     = new List <Money>();

            // Populate the fee list with increasing fees
            for (int j = 0; j < 10; j++)
            {
                feeV.Add(basefee * (j + 1));
            }

            // Store the hashes of transactions that have been
            // added to the mempool by their associated fee.
            // txHashes[j] is populated with transactions of
            // fee = basefee * (j+1)
            var txHashes = new List <uint256> [10];

            for (int i = 0; i < txHashes.Length; i++)
            {
                txHashes[i] = new List <uint256>();
            }

            // Create a transaction template
            var garbage = new Script(Enumerable.Range(0, 128).Select(i => (byte)1).ToArray());

            var txf = new Transaction();

            txf.AddInput(new TxIn(garbage));
            txf.AddOutput(new TxOut(0L, Script.Empty));
            var baseRate = new FeeRate(basefee, txf.GetVirtualSize());

            // Create a fake block
            var block    = new List <Transaction>();
            int blocknum = 0;
            int answerFound;

            // Loop through 200 blocks.
            // With a decay of .998 and 4 transactions per fee bucket per block,
            // this keeps the tracked tx count at about 1.33 per bucket, above the 1 threshold.
            while (blocknum < 200)
            {
                for (int j = 0; j < 10; j++)
                {                                                                        // For each fee
                    for (int k = 0; k < 4; k++)
                    {                                                                    // add 4 fee txs
                        Transaction tx = txf.Clone(false);
                        tx.Inputs[0].PrevOut.N = (uint)(10000 * blocknum + 100 * j + k); // make transaction unique
                        uint256 hash = tx.GetHash();
                        mpool.AddUnchecked(hash, entry.Fee(feeV[j]).Time(dateTimeSet.GetTime()).Priority(0).Height(blocknum).FromTx(tx, mpool));
                        txHashes[j].Add(hash);
                    }
                }
                // Create blocks where higher-fee txs are included more often
                for (int h = 0; h <= blocknum % 10; h++)
                {
                    // 10/10 blocks add highest fee transactions
                    // 9/10 blocks add 2nd highest and so on until ...
                    // 1/10 blocks add lowest fee transactions
                    while (txHashes[9 - h].Count > 0)
                    {
                        Transaction ptx = mpool.Get(txHashes[9 - h].Last());
                        if (ptx != null)
                        {
                            block.Add(ptx);
                        }
                        txHashes[9 - h].Remove(txHashes[9 - h].Last());
                    }
                }
                mpool.RemoveForBlock(block, ++blocknum);
                block.Clear();
                if (blocknum == 30)
                {
                    // At this point we should need to combine 5 buckets to get enough data points
                    // So estimateFee(1,2,3) should fail and estimateFee(4) should return somewhere around
                    // 8*baserate.  estimateFee(4) %'s are 100,100,100,100,90 = average 98%
                    Assert.True(mpool.EstimateFee(1) == new FeeRate(0));
                    Assert.True(mpool.EstimateFee(2) == new FeeRate(0));
                    Assert.True(mpool.EstimateFee(3) == new FeeRate(0));
                    Assert.True(mpool.EstimateFee(4).FeePerK < 8 * baseRate.FeePerK + deltaFee);
                    Assert.True(mpool.EstimateFee(4).FeePerK > 8 * baseRate.FeePerK - deltaFee);

                    Assert.True(mpool.EstimateSmartFee(1, out answerFound) == mpool.EstimateFee(4) && answerFound == 4);
                    Assert.True(mpool.EstimateSmartFee(3, out answerFound) == mpool.EstimateFee(4) && answerFound == 4);
                    Assert.True(mpool.EstimateSmartFee(4, out answerFound) == mpool.EstimateFee(4) && answerFound == 4);
                    Assert.True(mpool.EstimateSmartFee(8, out answerFound) == mpool.EstimateFee(8) && answerFound == 8);
                }
            }

            var origFeeEst = new List <Money>();

            // Highest feerate is 10*baseRate and gets in all blocks,
            // second highest feerate is 9*baseRate and gets in 9/10 blocks = 90%,
            // third highest feerate is 8*baseRate and gets in 8/10 blocks = 80%,
            // so estimateFee(1) would return 10*baseRate but is hardcoded to return failure
            // Second highest feerate has 100% chance of being included by 2 blocks,
            // so estimateFee(2) should return 9*baseRate etc...
            for (int i = 1; i < 10; i++)
            {
                origFeeEst.Add(mpool.EstimateFee(i).FeePerK);
                if (i > 2)
                { // Fee estimates should be monotonically decreasing
                    Assert.True(origFeeEst[i - 1] <= origFeeEst[i - 2]);
                }
                int mult = 11 - i;
                if (i > 1)
                {
                    Assert.True(origFeeEst[i - 1] < mult * baseRate.FeePerK + deltaFee);
                    Assert.True(origFeeEst[i - 1] > mult * baseRate.FeePerK - deltaFee);
                }
                else
                {
                    Assert.True(origFeeEst[i - 1] == new FeeRate(0).FeePerK);
                }
            }

            // Mine 50 more blocks with no transactions happening; estimates shouldn't change.
            // We haven't decayed the moving average enough, so we still have enough data points in every bucket.
            while (blocknum < 250)
            {
                mpool.RemoveForBlock(block, ++blocknum);
            }

            Assert.True(mpool.EstimateFee(1) == new FeeRate(0));
            for (int i = 2; i < 10; i++)
            {
                Assert.True(mpool.EstimateFee(i).FeePerK < origFeeEst[i - 1] + deltaFee);
                Assert.True(mpool.EstimateFee(i).FeePerK > origFeeEst[i - 1] - deltaFee);
            }

            // Mine 15 more blocks with lots of transactions happening and not getting mined
            // Estimates should go up
            while (blocknum < 265)
            {
                for (int j = 0; j < 10; j++)
                {     // For each fee multiple
                    for (int k = 0; k < 4; k++)
                    { // add 4 fee txs
                        Transaction tx = txf.Clone(false);
                        tx.Inputs[0].PrevOut.N = (uint)(10000 * blocknum + 100 * j + k);
                        uint256 hash = tx.GetHash();
                        mpool.AddUnchecked(hash, entry.Fee(feeV[j]).Time(dateTimeSet.GetTime()).Priority(0).Height(blocknum).FromTx(tx, mpool));
                        txHashes[j].Add(hash);
                    }
                }
                mpool.RemoveForBlock(block, ++blocknum);
            }

            for (int i = 1; i < 10; i++)
            {
                Assert.True(mpool.EstimateFee(i) == new FeeRate(0) || mpool.EstimateFee(i).FeePerK > origFeeEst[i - 1] - deltaFee);
                Assert.True(mpool.EstimateSmartFee(i, out answerFound).FeePerK > origFeeEst[answerFound - 1] - deltaFee);
            }

            // Mine all those transactions
            // Estimates should still not be below original
            for (int j = 0; j < 10; j++)
            {
                while (txHashes[j].Count > 0)
                {
                    Transaction ptx = mpool.Get(txHashes[j].Last());
                    if (ptx != null)
                    {
                        block.Add(ptx);
                    }
                    txHashes[j].Remove(txHashes[j].Last());
                }
            }
            mpool.RemoveForBlock(block, 265);
            block.Clear();
            Assert.True(mpool.EstimateFee(1) == new FeeRate(0));
            for (int i = 2; i < 10; i++)
            {
                Assert.True(mpool.EstimateFee(i).FeePerK > origFeeEst[i - 1] - deltaFee);
            }

            // Mine 200 more blocks where everything is mined every block
            // Estimates should be below original estimates
            while (blocknum < 465)
            {
                for (int j = 0; j < 10; j++)
                {     // For each fee multiple
                    for (int k = 0; k < 4; k++)
                    { // add 4 fee txs
                        Transaction tx = txf.Clone(false);
                        tx.Inputs[0].PrevOut.N = (uint)(10000 * blocknum + 100 * j + k);
                        uint256 hash = tx.GetHash();
                        mpool.AddUnchecked(hash, entry.Fee(feeV[j]).Time(dateTimeSet.GetTime()).Priority(0).Height(blocknum).FromTx(tx, mpool));
                        Transaction ptx = mpool.Get(hash);
                        if (ptx != null)
                        {
                            block.Add(ptx);
                        }
                    }
                }
                mpool.RemoveForBlock(block, ++blocknum);
                block.Clear();
            }
            Assert.True(mpool.EstimateFee(1) == new FeeRate(0));
            for (int i = 2; i < 10; i++)
            {
                Assert.True(mpool.EstimateFee(i).FeePerK < origFeeEst[i - 1] - deltaFee);
            }

            // Test that if the mempool is limited, estimateSmartFee won't return a value below the mempool min fee
            // and that estimateSmartPriority returns essentially an infinite value
            mpool.AddUnchecked(txf.GetHash(), entry.Fee(feeV[5]).Time(dateTimeSet.GetTime()).Priority(0).Height(blocknum).FromTx(txf, mpool));
            // Evict that transaction, which should set a mempool min fee of minRelayTxFee + feeV[5]
            mpool.TrimToSize(1);
            Assert.True(mpool.GetMinFee(1).FeePerK > feeV[5]);
            for (int i = 1; i < 10; i++)
            {
                Assert.True(mpool.EstimateSmartFee(i, out answerFound).FeePerK >= mpool.EstimateFee(i).FeePerK);
                Assert.True(mpool.EstimateSmartFee(i, out answerFound).FeePerK >= mpool.GetMinFee(1).FeePerK);
                Assert.True(mpool.EstimateSmartPriority(i, out answerFound) == BlockPolicyEstimator.InfPriority);
            }
        }
Example #2
            public async Task InitializeAsync()
            {
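                // blockinfoarr is a flat array of precomputed (extranonce, nonce) pairs;
                // consume it two values at a time to build the Blockinfo list used when mining the test chain.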
                this.blockinfo = new List <Blockinfo>();
                List <long> lst = blockinfoarr.Cast <long>().ToList();

                for (int i = 0; i < lst.Count; i += 2)
                {
                    this.blockinfo.Add(new Blockinfo {
                        extranonce = (int)lst[i], nonce = (uint)lst[i + 1]
                    });
                }

                // Note that by default, these tests run with size accounting enabled.
                this.network = KnownNetworks.RegTest;
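                // Pay-to-pubkey output script; the key below is the well-known public key from the Bitcoin genesis coinbase.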
                byte[] hex = Encoders.Hex.DecodeData("04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f");
                this.scriptPubKey = new Script(new[] { Op.GetPushOp(hex), OpcodeType.OP_CHECKSIG });

                this.entry        = new TestMemPoolEntryHelper();
                this.ChainIndexer = new ChainIndexer(this.network);
                this.network.Consensus.Options = new ConsensusOptions();

                IDateTimeProvider dateTimeProvider = DateTimeProvider.Default;

                var loggerFactory = ExtendedLoggerFactory.Create();

                var nodeSettings      = new NodeSettings(this.network, args: new string[] { "-checkpoints" });
                var consensusSettings = new ConsensusSettings(nodeSettings);

                var inMemoryCoinView = new InMemoryCoinView(new HashHeightPair(this.ChainIndexer.Tip));
                var nodeStats        = new NodeStats(dateTimeProvider, loggerFactory);

                this.cachedCoinView = new CachedCoinView(this.network, new Checkpoints(), inMemoryCoinView, dateTimeProvider, new LoggerFactory(), nodeStats, consensusSettings);

                var signals       = new Signals.Signals(loggerFactory, null);
                var asyncProvider = new AsyncProvider(loggerFactory, signals, new NodeLifetime());

                var deployments = new NodeDeployments(this.network, this.ChainIndexer);

                var genesis = this.network.GetGenesis();

                var chainState = new ChainState()
                {
                    BlockStoreTip = new ChainedHeader(genesis.Header, genesis.GetHash(), 0)
                };

                var consensusRulesContainer = new ConsensusRulesContainer();

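                // Build the rule container by instantiating the network's header and full validation rules via reflection.
                // FlushUtxosetRule takes an IInitialBlockDownloadState dependency, so a mock is supplied for it.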
                foreach (var ruleType in this.network.Consensus.ConsensusRules.HeaderValidationRules)
                {
                    consensusRulesContainer.HeaderValidationRules.Add(Activator.CreateInstance(ruleType) as HeaderValidationConsensusRule);
                }
                foreach (var ruleType in this.network.Consensus.ConsensusRules.FullValidationRules)
                {
                    FullValidationConsensusRule rule = null;
                    if (ruleType == typeof(FlushUtxosetRule))
                    {
                        rule = new FlushUtxosetRule(new Mock <IInitialBlockDownloadState>().Object);
                    }
                    else
                    {
                        rule = Activator.CreateInstance(ruleType) as FullValidationConsensusRule;
                    }

                    consensusRulesContainer.FullValidationRules.Add(rule);
                }

                this.ConsensusRules = new PowConsensusRuleEngine(this.network, loggerFactory, dateTimeProvider, this.ChainIndexer, deployments, consensusSettings,
                                                                 new Checkpoints(), this.cachedCoinView, chainState, new InvalidBlockHashStore(dateTimeProvider), nodeStats, asyncProvider, consensusRulesContainer).SetupRulesEngineParent();

                this.consensus = ConsensusManagerHelper.CreateConsensusManager(this.network, chainState: chainState, inMemoryCoinView: inMemoryCoinView, chainIndexer: this.ChainIndexer, consensusRules: this.ConsensusRules);

                await this.consensus.InitializeAsync(chainState.BlockStoreTip);

                this.entry.Fee(11);
                this.entry.Height(11);

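                // Use a settable time provider so the tests control the timestamps seen by the mempool.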
                var dateTimeProviderSet = new DateTimeProviderSet
                {
                    time    = dateTimeProvider.GetTime(),
                    timeutc = dateTimeProvider.GetUtcNow()
                };

                this.DateTimeProvider = dateTimeProviderSet;
                this.mempool          = new TxMempool(dateTimeProvider, new BlockPolicyEstimator(new MempoolSettings(nodeSettings), loggerFactory, nodeSettings), loggerFactory, nodeSettings);
                this.mempoolLock      = new MempoolSchedulerLock();

                // We can't make transactions until we have inputs
                // Therefore, load 100 blocks :)
                this.baseheight = 0;
                var blocks = new List <Block>();

                this.txFirst = new List <Transaction>();

                this.nonce = 0;

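                // Mine one block per precomputed blockinfo entry, keeping the first few coinbases
                // in txFirst so later tests have spendable inputs.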
                for (int i = 0; i < this.blockinfo.Count; ++i)
                {
                    Block block = this.network.CreateBlock();
                    block.Header.HashPrevBlock = this.consensus.Tip.HashBlock;
                    block.Header.Version       = 1;
                    block.Header.Time          = Utils.DateTimeToUnixTime(this.ChainIndexer.Tip.GetMedianTimePast()) + 1;

                    Transaction txCoinbase = this.network.CreateTransaction();
                    txCoinbase.Version = 1;
                    txCoinbase.AddInput(new TxIn(new Script(new[] { Op.GetPushOp(this.blockinfo[i].extranonce), Op.GetPushOp(this.ChainIndexer.Height) })));
                    // Ignore the (optional) segwit commitment added by CreateNewBlock (as the hardcoded nonces don't account for this)
                    txCoinbase.AddOutput(new TxOut(Money.Zero, new Script()));
                    block.AddTransaction(txCoinbase);

                    if (this.txFirst.Count == 0)
                    {
                        this.baseheight = this.ChainIndexer.Height;
                    }

                    if (this.txFirst.Count < 4)
                    {
                        this.txFirst.Add(block.Transactions[0]);
                    }

                    block.Header.Bits = block.Header.GetWorkRequired(this.network, this.ChainIndexer.Tip);

                    block.UpdateMerkleRoot();

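                    // Grind the nonce until the header satisfies the regtest proof-of-work target.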
                    while (!block.CheckProofOfWork())
                    {
                        block.Header.Nonce = ++this.nonce;
                    }

                    // Serialization sets the BlockSize property.
                    block = Block.Load(block.ToBytes(), this.network.Consensus.ConsensusFactory);

                    var res = await this.consensus.BlockMinedAsync(block);

                    if (res == null)
                    {
                        throw new InvalidOperationException();
                    }

                    blocks.Add(block);
                }

                // Just to make sure we can still make simple blocks
                this.newBlock = AssemblerForTest(this).Build(this.ChainIndexer.Tip, this.scriptPubKey);
                Assert.NotNull(this.newBlock);
            }
Example #3
        public void MempoolSizeLimitTest()
        {
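            // Exercises TrimToSize and GetMinFee: feerate-based eviction (including CPFP packages)
            // and the rolling minimum-fee decay after transactions are evicted.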
            NodeSettings settings    = NodeSettings.Default(KnownNetworks.TestNet);
            var          dateTimeSet = new DateTimeProviderSet();
            var          pool        = new TxMempool(dateTimeSet, new BlockPolicyEstimator(new MempoolSettings(settings), settings.LoggerFactory, settings), settings.LoggerFactory, settings);
            var          entry       = new TestMemPoolEntryHelper();

            entry.Priority(10.0);

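            // Two unrelated transactions at different feerates; trimming should evict the lower-feerate one first.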
            var tx1 = new Transaction();

            tx1.AddInput(new TxIn(new Script(OpcodeType.OP_1)));
            tx1.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_1, OpcodeType.OP_EQUAL)));
            pool.AddUnchecked(tx1.GetHash(), entry.Fee(10000L).FromTx(tx1, pool));

            var tx2 = new Transaction();

            tx2.AddInput(new TxIn(new Script(OpcodeType.OP_2)));
            tx2.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_2, OpcodeType.OP_EQUAL)));
            pool.AddUnchecked(tx2.GetHash(), entry.Fee(5000L).FromTx(tx2, pool));

            pool.TrimToSize(pool.DynamicMemoryUsage()); // should do nothing
            Assert.True(pool.Exists(tx1.GetHash()));
            Assert.True(pool.Exists(tx2.GetHash()));

            pool.TrimToSize(pool.DynamicMemoryUsage() * 3 / 4); // should remove the lower-feerate transaction
            Assert.True(pool.Exists(tx1.GetHash()));
            Assert.True(!pool.Exists(tx2.GetHash()));

            pool.AddUnchecked(tx2.GetHash(), entry.FromTx(tx2, pool));
            var tx3 = new Transaction();

            tx3.AddInput(new TxIn(new OutPoint(tx2.GetHash(), 0), new Script(OpcodeType.OP_2)));
            tx3.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_3, OpcodeType.OP_EQUAL)));
            pool.AddUnchecked(tx3.GetHash(), entry.Fee(20000L).FromTx(tx3, pool));

            pool.TrimToSize(pool.DynamicMemoryUsage() * 3 / 4); // tx3 should pay for tx2 (CPFP)
            Assert.True(!pool.Exists(tx1.GetHash()));
            Assert.True(pool.Exists(tx2.GetHash()));
            Assert.True(pool.Exists(tx3.GetHash()));

            pool.TrimToSize(tx1.GetVirtualSize(KnownNetworks.TestNet.Consensus.Options.WitnessScaleFactor)); // mempool is limited to tx1's size in memory usage, so nothing fits
            Assert.True(!pool.Exists(tx1.GetHash()));
            Assert.True(!pool.Exists(tx2.GetHash()));
            Assert.True(!pool.Exists(tx3.GetHash()));

            var maxFeeRateRemoved = new FeeRate(25000, tx3.GetVirtualSize(KnownNetworks.TestNet.Consensus.Options.WitnessScaleFactor) + tx2.GetVirtualSize(KnownNetworks.TestNet.Consensus.Options.WitnessScaleFactor));

            Assert.Equal(pool.GetMinFee(1).FeePerK, maxFeeRateRemoved.FeePerK + 1000);

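            // Build a small package: tx4 is a parent with two outputs, tx5 and tx6 spend tx4,
            // and tx7 spends both tx5 and tx6, so trimming has to account for package (CPFP) feerates.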
            var tx4 = new Transaction();

            tx4.AddInput(new TxIn(new Script(OpcodeType.OP_4)));
            tx4.AddInput(new TxIn(new Script(OpcodeType.OP_4)));
            tx4.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_4, OpcodeType.OP_EQUAL)));
            tx4.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_4, OpcodeType.OP_EQUAL)));

            var tx5 = new Transaction();

            tx5.AddInput(new TxIn(new OutPoint(tx4.GetHash(), 0), new Script(OpcodeType.OP_4)));
            tx5.AddInput(new TxIn(new Script(OpcodeType.OP_5)));
            tx5.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_5, OpcodeType.OP_EQUAL)));
            tx5.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_5, OpcodeType.OP_EQUAL)));

            var tx6 = new Transaction();

            tx6.AddInput(new TxIn(new OutPoint(tx4.GetHash(), 1), new Script(OpcodeType.OP_4)));
            tx6.AddInput(new TxIn(new Script(OpcodeType.OP_6)));
            tx6.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_6, OpcodeType.OP_EQUAL)));
            tx6.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_6, OpcodeType.OP_EQUAL)));

            var tx7 = new Transaction();

            tx7.AddInput(new TxIn(new OutPoint(tx5.GetHash(), 0), new Script(OpcodeType.OP_5)));
            tx7.AddInput(new TxIn(new OutPoint(tx6.GetHash(), 0), new Script(OpcodeType.OP_6)));
            tx7.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_7, OpcodeType.OP_EQUAL)));
            tx7.AddOutput(new TxOut(new Money(10 * Money.COIN), new Script(OpcodeType.OP_7, OpcodeType.OP_EQUAL)));

            pool.AddUnchecked(tx4.GetHash(), entry.Fee(7000L).FromTx(tx4, pool));
            pool.AddUnchecked(tx5.GetHash(), entry.Fee(1000L).FromTx(tx5, pool));
            pool.AddUnchecked(tx6.GetHash(), entry.Fee(1100L).FromTx(tx6, pool));
            pool.AddUnchecked(tx7.GetHash(), entry.Fee(9000L).FromTx(tx7, pool));

            // We only require this to remove, at most, 2 txs, because it's not clear what we're really optimizing for aside from that
            pool.TrimToSize(pool.DynamicMemoryUsage() - 1);
            Assert.True(pool.Exists(tx4.GetHash()));
            Assert.True(pool.Exists(tx6.GetHash()));
            Assert.True(!pool.Exists(tx7.GetHash()));

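            // Re-add the evicted transactions so the next trim starts from the full package again.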
            if (!pool.Exists(tx5.GetHash()))
            {
                pool.AddUnchecked(tx5.GetHash(), entry.Fee(1000L).FromTx(tx5, pool));
            }
            pool.AddUnchecked(tx7.GetHash(), entry.Fee(9000L).FromTx(tx7, pool));

            pool.TrimToSize(pool.DynamicMemoryUsage() / 2); // should maximize mempool size by only removing 5/7
            Assert.True(pool.Exists(tx4.GetHash()));
            Assert.True(!pool.Exists(tx5.GetHash()));
            Assert.True(pool.Exists(tx6.GetHash()));
            Assert.True(!pool.Exists(tx7.GetHash()));

            pool.AddUnchecked(tx5.GetHash(), entry.Fee(1000L).FromTx(tx5, pool));
            pool.AddUnchecked(tx7.GetHash(), entry.Fee(9000L).FromTx(tx7, pool));

            var vtx = new List <Transaction>();

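            // Advance the fake clock by whole and fractional halflives and check how the rolling minimum fee decays:
            // it starts at maxFeeRateRemoved + 1000, halves each halflife (faster when the mempool is well below its
            // target size), and snaps to zero once it falls below 1000/2.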
            dateTimeSet.time = 42 + TxMempool.RollingFeeHalflife;
            Assert.Equal(pool.GetMinFee(1).FeePerK.Satoshi, maxFeeRateRemoved.FeePerK.Satoshi + 1000);
            // ... we should keep the same min fee until we get a block
            pool.RemoveForBlock(vtx, 1);
            dateTimeSet.time = 42 + 2 * TxMempool.RollingFeeHalflife;
            Assert.Equal(pool.GetMinFee(1).FeePerK.Satoshi, (maxFeeRateRemoved.FeePerK.Satoshi + 1000) / 2);
            // ... then feerate should drop 1/2 each halflife

            dateTimeSet.time = 42 + 2 * TxMempool.RollingFeeHalflife + TxMempool.RollingFeeHalflife / 2;
            Assert.Equal(pool.GetMinFee(pool.DynamicMemoryUsage() * 5 / 2).FeePerK.Satoshi, (maxFeeRateRemoved.FeePerK.Satoshi + 1000) / 4);
            // ... with a 1/2 halflife when mempool is < 1/2 its target size

            dateTimeSet.time = 42 + 2 * TxMempool.RollingFeeHalflife + TxMempool.RollingFeeHalflife / 2 + TxMempool.RollingFeeHalflife / 4;
            Assert.Equal(pool.GetMinFee(pool.DynamicMemoryUsage() * 9 / 2).FeePerK.Satoshi, (maxFeeRateRemoved.FeePerK.Satoshi + 1000) / 8);
            // ... with a 1/4 halflife when mempool is < 1/4 its target size

            dateTimeSet.time = 42 + 7 * TxMempool.RollingFeeHalflife + TxMempool.RollingFeeHalflife / 2 + TxMempool.RollingFeeHalflife / 4;
            Assert.Equal(1000, pool.GetMinFee(1).FeePerK.Satoshi);
            // ... but feerate should never drop below 1000

            dateTimeSet.time = 42 + 8 * TxMempool.RollingFeeHalflife + TxMempool.RollingFeeHalflife / 2 + TxMempool.RollingFeeHalflife / 4;
            Assert.Equal(0, pool.GetMinFee(1).FeePerK.Satoshi);
            // ... unless it has gone all the way to 0 (after getting past 1000/2)
        }