public void OptimalChanged_SlowerSmaller(long minSize, long maxSize, double adjustment, double tolerance)
{
    //Sample Velocity    Sample Size    Action
    //==================================================================
    //Slower             Smaller        Ignore sample, Stable
    var samples = new[]
    {
        CreateSample(minSize * Math.Pow(1 + adjustment, 0), 1),
        CreateSample(minSize * Math.Pow(1 + adjustment, 1), 2),
        CreateSample(minSize * Math.Pow(1 + adjustment, 2), 3), // (optimal)
        CreateSample(minSize * Math.Pow(1 + adjustment, 3), 2)
    };
    var lastSample = CreateSample(minSize * Math.Pow(1 + adjustment, 0.5), 1); // (slower, smaller)

    var op = new ThroughputOptimizer(minSize, maxSize, adjustment, tolerance,
        ThroughputOptimizer.SamplingAdjustmentDirection.Increasing);

    samples.ForEach(s => op.RegisterSample(s.Item1, s.Item2));

    Assert.AreEqual(samples.Length, op.SampleCount);
    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Stablized, op.AdjustmentDirection);
    Assert.AreEqual(samples[samples.Length - 2].Item1, op.SuggestedBatchSize);

    op.RegisterSample(lastSample.Item1, lastSample.Item2);

    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Stablized, op.AdjustmentDirection);
    Assert.AreEqual(samples[samples.Length - 2].Item1, op.SuggestedBatchSize);
}
public void OptimalChanged_SameBigger(long minSize, long maxSize, double adjustment, double tolerance)
{
    //Sample Velocity    Sample Size    Action
    //==================================================================
    //Same               Bigger         Keep sample, Increase
    var samples = new[]
    {
        CreateSample(minSize * Math.Pow(1 + adjustment, 0), 1),
        CreateSample(minSize * Math.Pow(1 + adjustment, 1), 2),
        CreateSample(minSize * Math.Pow(1 + adjustment, 2), 3), // (optimal)
        CreateSample(minSize * Math.Pow(1 + adjustment, 3), 2)
    };
    var lastSample = CreateSample(minSize * Math.Pow(1 + adjustment, 4), 3); // (same velocity, bigger size)

    var op = new ThroughputOptimizer(minSize, maxSize, adjustment, tolerance,
        ThroughputOptimizer.SamplingAdjustmentDirection.Increasing);

    samples.ForEach(s => op.RegisterSample(s.Item1, s.Item2));

    Assert.AreEqual(samples.Length, op.SampleCount);
    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Stablized, op.AdjustmentDirection);
    Assert.AreEqual(samples[samples.Length - 2].Item1, op.SuggestedBatchSize);

    op.RegisterSample(lastSample.Item1, lastSample.Item2);

    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Increasing, op.AdjustmentDirection);
    Assert.That(Math.Round(lastSample.Item1 * (1.0 + adjustment), 0), Is.EqualTo(op.SuggestedBatchSize).Within(tolerance));
}
public void FindOptimal_Decreasing(long minSize, long maxSize, double adjustment, double tolerance)
{
    var samples = new[]
    {
        CreateSample(maxSize * Math.Pow(1 - adjustment, 0), 1),
        CreateSample(maxSize * Math.Pow(1 - adjustment, 1), 2),
        CreateSample(maxSize * Math.Pow(1 - adjustment, 2), 3), // (optimal)
        CreateSample(maxSize * Math.Pow(1 - adjustment, 3), 2),
    };

    var op = new ThroughputOptimizer(minSize, maxSize, adjustment, tolerance,
        ThroughputOptimizer.SamplingAdjustmentDirection.Decreasing);

    op.RegisterSample(samples[0].Item1, samples[0].Item2);
    Assert.AreEqual(1, op.SampleCount);
    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Decreasing, op.AdjustmentDirection);

    op.RegisterSample(samples[1].Item1, samples[1].Item2);
    Assert.AreEqual(2, op.SampleCount);
    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Decreasing, op.AdjustmentDirection);

    op.RegisterSample(samples[2].Item1, samples[2].Item2);
    Assert.AreEqual(3, op.SampleCount);
    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Decreasing, op.AdjustmentDirection);

    op.RegisterSample(samples[3].Item1, samples[3].Item2);
    Assert.AreEqual(4, op.SampleCount);
    Assert.AreEqual(ThroughputOptimizer.SamplingAdjustmentDirection.Stablized, op.AdjustmentDirection);
    Assert.AreEqual(samples[2].Item1, op.SuggestedBatchSize);
}
public void IgnoreSample_Simple_2(long minSize, long maxSize, double adjustment, double tolerance)
{
    const double breach = 0.01;

    var op = new ThroughputOptimizer(minSize, maxSize, adjustment, tolerance,
        ThroughputOptimizer.SamplingAdjustmentDirection.Decreasing);

    op.RegisterSample((long)Math.Round(maxSize * (1 - (adjustment + tolerance + breach)).ClipTo(0, double.MaxValue), 0), TimeSpan.FromSeconds(1));
    op.RegisterSample((long)Math.Round(maxSize * (1 + (adjustment + tolerance + breach)).ClipTo(0, double.MaxValue), 0), TimeSpan.FromSeconds(1));

    Assert.AreEqual(0, op.SampleCount);
}
public void Increase_TwoSamples(long minSize, long maxSize, double adjustment, double tolerance)
{
    var samples = new[]
    {
        CreateSample(minSize * Math.Pow(1 + adjustment, 0), 1),
        CreateSample(minSize * Math.Pow(1 + adjustment, 1), 2),
    };

    var op = new ThroughputOptimizer(minSize, maxSize, adjustment, tolerance,
        ThroughputOptimizer.SamplingAdjustmentDirection.Increasing);

    op.RegisterSample(samples[0].Item1, samples[0].Item2);
    Assert.AreEqual(1, op.SampleCount);

    op.RegisterSample(samples[1].Item1, samples[1].Item2);
    Assert.AreEqual(2, op.SampleCount);
    Assert.AreEqual((long)Math.Round(samples[1].Item1 * (1 + adjustment), 0), op.SuggestedBatchSize);
}
public void Increase_Simple(long minSize, long maxSize, double adjustment, double tolerance)
{
    var op = new ThroughputOptimizer(minSize, maxSize, adjustment, tolerance,
        ThroughputOptimizer.SamplingAdjustmentDirection.Increasing);

    op.RegisterSample(minSize, TimeSpan.FromSeconds(1));
    Assert.AreEqual(1, op.SampleCount);
    Assert.AreEqual((long)Math.Round(minSize * (1 + adjustment), 0), op.SuggestedBatchSize);
}
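// The tests above rely on a CreateSample helper and ForEach/ClipTo extensions that are defined
// elsewhere in the fixture and not shown in this section. The commented sketch below is an
// assumption for readability only: the tuple shape (long batch size, TimeSpan duration) matches
// how RegisterSample is called, and the second argument is treated as a relative velocity so that
// the sample marked "(optimal)" (velocity 3) is the fastest. The actual helpers may differ.
//
// private static Tuple<long, TimeSpan> CreateSample(double batchSize, double velocity)
// {
//     // Round the batch size and derive a duration such that a higher velocity argument
//     // produces a faster sample for the same size.
//     var size = (long)Math.Round(batchSize, 0);
//     return Tuple.Create(size, TimeSpan.FromSeconds(size / velocity));
// }
//
// A plausible ForEach extension (not part of LINQ), which would live in its own static class:
//
// public static class EnumerableExtensions
// {
//     public static void ForEach<T>(this IEnumerable<T> source, Action<T> action)
//     {
//         foreach (var item in source) action(item);
//     }
// }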