public async Task ScaleDecision_ControlQueueLatency_NotMaxPollingDelay()
{
    var monitor = GetFakePerformanceMonitor();

    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 10, 10, 10 });

    // The control queues were not idle in the previous sample, so the high-latency
    // threshold applies here rather than the max-polling-delay rule.
    monitor.AddLatencies(0, new[] { 9999, 9999, 9999, 9999 });

    for (int workers = 1; workers < 10; workers++)
    {
        PerformanceHeartbeat heartbeat = await monitor.PulseAsync(workers);
        ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
        Assert.IsTrue(recommendation.KeepWorkersAlive);

        // Fewer than 3 workers: scale out; 3-4 workers: hold; more: scale in.
        ScaleAction expected =
            workers < 3 ? ScaleAction.AddWorker :
            workers <= 4 ? ScaleAction.None :
            ScaleAction.RemoveWorker;
        Assert.AreEqual(expected, recommendation.Action);
    }
}
public async Task ScaleDecision_ControlQueueLatency_Idle2()
{
    var monitor = GetFakePerformanceMonitor();

    // Oldest sample saw traffic on all partitions; after that only the last
    // two control queues report any latency, i.e. two partitions stay busy.
    monitor.AddLatencies(0, new[] { 1, 1, 1, 1 });
    for (int sample = 0; sample < 5; sample++)
    {
        monitor.AddLatencies(0, new[] { 0, 0, 1, 1 });
    }

    for (int workers = 1; workers < 10; workers++)
    {
        PerformanceHeartbeat heartbeat = await monitor.PulseAsync(workers);
        ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
        Assert.IsTrue(recommendation.KeepWorkersAlive);

        // Two busy partitions justify at most two workers.
        ScaleAction expected = workers > 2 ? ScaleAction.RemoveWorker : ScaleAction.None;
        Assert.AreEqual(expected, recommendation.Action);
    }
}
public async Task ScaleDecision_ControlQueueLatency_High4()
{
    var monitor = GetFakePerformanceMonitor();

    // All four partitions report steadily climbing latencies.
    monitor.AddLatencies(0, new[] { 600, 600, 600, 600 });
    monitor.AddLatencies(0, new[] { 700, 700, 700, 700 });
    monitor.AddLatencies(0, new[] { 800, 800, 800, 800 });
    monitor.AddLatencies(0, new[] { 900, 900, 900, 900 });
    monitor.AddLatencies(0, new[] { 1000, 1000, 1000, 1000 });

    // Under capacity: add a worker.
    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 3);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.AddWorker, recommendation.Action, "Four hot partitions");
    Assert.IsTrue(recommendation.KeepWorkersAlive);

    // At capacity (one worker per hot partition): hold steady.
    heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 4);
    recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.None, recommendation.Action, "Only four hot partitions");
    Assert.IsTrue(recommendation.KeepWorkersAlive);

    // Over capacity: shed a worker.
    heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 5);
    recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action, "No work items and only four hot partitions");
    Assert.IsTrue(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_ControlQueueLatency_QuickDrain()
{
    var monitor = GetFakePerformanceMonitor();

    monitor.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });
    monitor.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });
    monitor.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });
    monitor.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });

    // Something happened and the control queues drained to zero all at once.
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });

    for (int workers = 1; workers < 10; workers++)
    {
        PerformanceHeartbeat heartbeat = await monitor.PulseAsync(workers);
        ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
        Assert.IsTrue(recommendation.KeepWorkersAlive);

        // Four partitions: anything above four workers can be scaled in,
        // but don't scale in further on a single drained sample.
        ScaleAction expected = workers > 4 ? ScaleAction.RemoveWorker : ScaleAction.None;
        Assert.AreEqual(expected, recommendation.Action);
    }
}
/// <summary>
/// Test helper that uses reflection to overwrite the
/// <see cref="PerformanceHeartbeat.ScaleRecommendation"/> property, which has
/// no public setter.
/// </summary>
/// <param name="performanceHeartbeat">The heartbeat instance to mutate.</param>
/// <param name="scaleRecommendation">The recommendation value to inject.</param>
private static void SetScaleRecommendation(PerformanceHeartbeat performanceHeartbeat, ScaleRecommendation scaleRecommendation)
{
    // nameof keeps this lookup refactor-safe; fail loudly (instead of a bare
    // NullReferenceException) if the property is ever renamed or removed.
    var prop = typeof(PerformanceHeartbeat).GetProperty(nameof(PerformanceHeartbeat.ScaleRecommendation));
    if (prop == null)
    {
        throw new InvalidOperationException(
            $"Property '{nameof(PerformanceHeartbeat.ScaleRecommendation)}' was not found on {nameof(PerformanceHeartbeat)}.");
    }

    prop.SetValue(performanceHeartbeat, scaleRecommendation);
}
public async Task ScaleDecision_WorkItemLatency_Low()
{
    var monitor = GetFakePerformanceMonitor();
    for (int sample = 0; sample < 5; sample++)
    {
        monitor.AddLatencies(10, new[] { 0, 0, 0, 0 });
    }

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.None, recommendation.Action);
    Assert.IsTrue(recommendation.KeepWorkersAlive);

    var random = new Random();

    // Scale down for low latency is semi-random, so need to take a lot of samples
    var results = new ScaleRecommendation[500];
    for (int i = 0; i < results.Length; i++)
    {
        monitor.AddLatencies(random.Next(50), new[] { 0, 0, 0, 0 });
        heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 2);
        results[i] = heartbeat.ScaleRecommendation;
    }

    int scaleOutCount = results.Count(r => r.Action == ScaleAction.AddWorker);
    int scaleInCount = results.Count(r => r.Action == ScaleAction.RemoveWorker);
    int noScaleCount = results.Count(r => r.Action == ScaleAction.None);
    int keepAliveCount = results.Count(r => r.KeepWorkersAlive);

    Trace.TraceInformation($"Scale-out count : {scaleOutCount}.");
    Trace.TraceInformation($"Scale-in count : {scaleInCount}.");
    Trace.TraceInformation($"No-scale count : {noScaleCount}.");
    Trace.TraceInformation($"Keep-alive count : {keepAliveCount}.");

    // It is expected that we scale-in only a small percentage of the time and never scale-out.
    Assert.AreEqual(0, scaleOutCount);
    Assert.AreNotEqual(0, scaleInCount);
    Assert.IsTrue(noScaleCount > scaleInCount, "Should have more no-scale decisions");
    Assert.IsTrue(keepAliveCount > results.Length * 0.9, "Almost all should be keep-alive");
}
public async Task ScaleDecision_ControlQueueLatency_High1()
{
    var monitor = GetFakePerformanceMonitor();

    // Only the last partition is hot, with steadily climbing latency.
    monitor.AddLatencies(0, new[] { 0, 0, 0, 600 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 700 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 800 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 900 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 1000 });

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

    // One worker already covers a single hot partition.
    Assert.AreEqual(ScaleAction.None, recommendation.Action, "Only one hot partition");
    Assert.IsTrue(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_WorkItemLatency_Moderate()
{
    var monitor = GetFakePerformanceMonitor();

    // Work-item latency climbs but stays below the high threshold.
    monitor.AddLatencies(500, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(600, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(700, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(800, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(900, new[] { 0, 0, 0, 0 });

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.None, recommendation.Action);
    Assert.IsTrue(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_ControlQueueLatency_MaxPollingDelay2()
{
    var monitor = GetFakePerformanceMonitor();

    // Idle history, one polling-delay spike, then latencies settle down again.
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 10000, 10000, 10000, 10000 });
    monitor.AddLatencies(0, new[] { 100, 100, 100, 100 });

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.None, recommendation.Action);
    Assert.IsTrue(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_WorkItemLatency_QuickDrain()
{
    var monitor = GetFakePerformanceMonitor();

    monitor.AddLatencies(30000, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(30000, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(30000, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(30000, new[] { 0, 0, 0, 0 });

    // Something happened and we immediately drained the work-item queue
    monitor.AddLatencies(3, new[] { 0, 0, 0, 0 });

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.None, recommendation.Action);
    Assert.IsTrue(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_WorkItemLatency_NotMaxPollingDelay()
{
    var monitor = GetFakePerformanceMonitor();

    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(10, new[] { 0, 0, 0, 0 });

    // The work-item queue was not idle in the previous sample, so the high-latency
    // threshold applies here rather than the max-polling-delay rule.
    monitor.AddLatencies(9999, new[] { 0, 0, 0, 0 });

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.AddWorker, recommendation.Action);
    Assert.IsTrue(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_ControlQueueLatency_MaxPollingDelay1()
{
    var monitor = GetFakePerformanceMonitor();

    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });

    // When queue is idle, first non-zero latency must be > max polling interval
    monitor.AddLatencies(0, new[] { 9999, 9999, 9999, 9999 });

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
    Assert.AreEqual(ScaleAction.None, recommendation.Action);
    Assert.IsTrue(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_ControlQueueLatency_Idle4()
{
    var monitor = GetFakePerformanceMonitor();

    // One old message followed by five fully idle samples: the task hub is idle.
    monitor.AddLatencies(0, new[] { 0, 0, 0, 1 });
    for (int sample = 0; sample < 5; sample++)
    {
        monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    }

    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
    ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

    // A fully idle hub can scale all the way to zero.
    Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action);
    Assert.IsFalse(recommendation.KeepWorkersAlive);
}
public async Task ScaleDecision_ControlQueueLatency_NotIdle()
{
    var monitor = GetFakePerformanceMonitor();

    // One message appeared within the last five samples, so the hub is not idle.
    monitor.AddLatencies(0, new[] { 0, 0, 0, 1 });
    for (int sample = 0; sample < 4; sample++)
    {
        monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
    }

    for (int i = 0; i < 100; i++)
    {
        PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
        ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

        // We should never scale to zero unless all control queues are idle.
        Assert.AreEqual(ScaleAction.None, recommendation.Action);
        Assert.IsTrue(recommendation.KeepWorkersAlive);
    }
}
public async Task ScaleDecision_WorkItemLatency_NotIdle()
{
    var monitor = GetFakePerformanceMonitor();

    for (int i = 0; i < 100; i++)
    {
        // One work-item message followed by four idle samples each iteration.
        monitor.AddLatencies(1, new[] { 0, 0, 0, 0 });
        for (int sample = 0; sample < 4; sample++)
        {
            monitor.AddLatencies(0, new[] { 0, 0, 0, 0 });
        }

        PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount: 1);
        ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

        // Should never scale to zero when there was a message in a queue
        // within the last 5 samples.
        Assert.AreEqual(ScaleAction.None, recommendation.Action);
        Assert.IsTrue(recommendation.KeepWorkersAlive);
    }
}
/// <summary>
/// Initializes a new heartbeat carrying the unprocessed change count and the
/// accompanying scale recommendation.
/// </summary>
/// <param name="unprocessedChanges">Number of changes not yet processed.</param>
/// <param name="scaleRecommendation">Scale recommendation to surface with this heartbeat.</param>
internal SqlHeartbeat(long unprocessedChanges, ScaleRecommendation scaleRecommendation)
{
    this.ScaleRecommendation = scaleRecommendation;
    this.UnprocessedChanges = unprocessedChanges;
}
/// <summary>
/// Integration test: steadily enqueues orchestration instances and verifies that
/// the disconnected performance monitor observes growing control-queue load,
/// recommends adding workers while under-provisioned, and keeps workers alive.
/// </summary>
public async Task MonitorIncreasingControlQueueLoadDisconnected()
{
    var settings = new AzureStorageOrchestrationServiceSettings()
    {
        StorageConnectionString = TestHelpers.GetTestStorageAccountConnectionString(),
        TaskHubName = nameof(MonitorIncreasingControlQueueLoadDisconnected),
        PartitionCount = 4,
    };

    var service = new AzureStorageOrchestrationService(settings);
    var monitor = new DisconnectedPerformanceMonitor(settings.StorageConnectionString, settings.TaskHubName);
    int simulatedWorkerCount = 0;
    await service.CreateAsync();

    // A heartbeat should come back with no recommendation since there is no data.
    PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount);
    Assert.IsNotNull(heartbeat);
    Assert.IsNotNull(heartbeat.ScaleRecommendation);
    Assert.AreEqual(ScaleAction.None, heartbeat.ScaleRecommendation.Action);
    Assert.IsFalse(heartbeat.ScaleRecommendation.KeepWorkersAlive);

    var client = new TaskHubClient(service);
    var previousTotalLatency = TimeSpan.Zero;
    for (int i = 1; i < settings.PartitionCount + 10; i++)
    {
        await client.CreateOrchestrationInstanceAsync(typeof(NoOpOrchestration), input: null);
        heartbeat = await monitor.PulseAsync(simulatedWorkerCount);
        Assert.IsNotNull(heartbeat);

        ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
        Assert.IsNotNull(recommendation);
        Assert.IsTrue(recommendation.KeepWorkersAlive);

        // Every enqueued instance so far should be visible across the control queues,
        // while the work-item queue stays empty.
        Assert.AreEqual(settings.PartitionCount, heartbeat.PartitionCount);
        Assert.AreEqual(settings.PartitionCount, heartbeat.ControlQueueLengths.Count);
        Assert.AreEqual(i, heartbeat.ControlQueueLengths.Sum());
        Assert.AreEqual(0, heartbeat.WorkItemQueueLength);
        Assert.AreEqual(TimeSpan.Zero, heartbeat.WorkItemQueueLatency);

        TimeSpan currentTotalLatency = TimeSpan.FromTicks(heartbeat.ControlQueueLatencies.Sum(ts => ts.Ticks));
        Assert.IsTrue(currentTotalLatency > previousTotalLatency);

        if (i + 1 < DisconnectedPerformanceMonitor.QueueLengthSampleSize)
        {
            int queuesWithNonZeroLatencies = heartbeat.ControlQueueLatencies.Count(t => t > TimeSpan.Zero);
            Assert.IsTrue(queuesWithNonZeroLatencies > 0 && queuesWithNonZeroLatencies <= i);

            int queuesWithAtLeastOneMessage = heartbeat.ControlQueueLengths.Count(l => l > 0);
            Assert.IsTrue(queuesWithAtLeastOneMessage > 0 && queuesWithAtLeastOneMessage <= i);

            ScaleAction expectedScaleAction = simulatedWorkerCount == 0 ? ScaleAction.AddWorker : ScaleAction.None;
            Assert.AreEqual(expectedScaleAction, recommendation.Action);
        }
        else
        {
            // Validate that control queue latencies are going up with each iteration.
            Assert.IsTrue(currentTotalLatency.Ticks > previousTotalLatency.Ticks);
            previousTotalLatency = currentTotalLatency;
        }

        // (Removed a duplicate WorkItemQueueLength == 0 assertion; it is already
        // checked earlier in this same iteration.)
        Assert.AreEqual(0.0, heartbeat.WorkItemQueueLatencyTrend);

        if (recommendation.Action == ScaleAction.AddWorker)
        {
            simulatedWorkerCount++;
        }

        // The high-latency threshold is 1 second. Use a non-blocking delay:
        // Thread.Sleep inside an async method pins a thread-pool thread for
        // the whole wait.
        await Task.Delay(TimeSpan.FromSeconds(1.1));
    }
}