Example #1
        public async Task ScaleDecision_ControlQueueLatency_Idle2()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 1, 1, 1, 1 });
            mock.AddLatencies(0, new[] { 0, 0, 1, 1 });
            mock.AddLatencies(0, new[] { 0, 0, 1, 1 });
            mock.AddLatencies(0, new[] { 0, 0, 1, 1 });
            mock.AddLatencies(0, new[] { 0, 0, 1, 1 });
            mock.AddLatencies(0, new[] { 0, 0, 1, 1 });

            for (int simulatedWorkerCount = 1; simulatedWorkerCount < 10; simulatedWorkerCount++)
            {
                PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount);

                ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
                Assert.IsTrue(recommendation.KeepWorkersAlive);

                if (simulatedWorkerCount > 2)
                {
                    Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action);
                }
                else
                {
                    Assert.AreEqual(ScaleAction.None, recommendation.Action);
                }
            }
        }
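These tests call a GetFakePerformanceMonitor() helper that is not shown in the examples. Judging from the call sites, AddLatencies(workItemLatencyMs, controlQueueLatenciesMs) records one polling sample: the first argument is the work-item queue latency in milliseconds, and the array holds one latency per control-queue partition. The class below is only a sketch of that assumed sample shape, not the real test helper.

        using System;
        using System.Collections.Generic;
        using System.Linq;

        // Hypothetical stand-in for the data that AddLatencies(...) presumably records
        // before each PulseAsync call; the actual fake monitor is not shown here.
        public sealed class LatencySample
        {
            public LatencySample(int workItemLatencyMs, IEnumerable<int> controlQueueLatenciesMs)
            {
                this.WorkItemQueueLatency  = TimeSpan.FromMilliseconds(workItemLatencyMs);
                this.ControlQueueLatencies = controlQueueLatenciesMs
                    .Select(ms => TimeSpan.FromMilliseconds(ms))
                    .ToList();
            }

            public TimeSpan WorkItemQueueLatency { get; }

            public IReadOnlyList<TimeSpan> ControlQueueLatencies { get; }
        }

        // For example, mock.AddLatencies(0, new[] { 1, 1, 1, 1 }) corresponds to
        // one sample like new LatencySample(0, new[] { 1, 1, 1, 1 }).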
Example #2
        public async Task ScaleDecision_ControlQueueLatency_High4()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 600, 600, 600, 600 });
            mock.AddLatencies(0, new[] { 700, 700, 700, 700 });
            mock.AddLatencies(0, new[] { 800, 800, 800, 800 });
            mock.AddLatencies(0, new[] { 900, 900, 900, 900 });
            mock.AddLatencies(0, new[] { 1000, 1000, 1000, 1000 });

            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 3);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.AddWorker, recommendation.Action, "Four hot partitions");
            Assert.IsTrue(recommendation.KeepWorkersAlive);

            heartbeat = await mock.PulseAsync(simulatedWorkerCount : 4);

            recommendation = heartbeat.ScaleRecommendation;
            Assert.AreEqual(ScaleAction.None, recommendation.Action, "Only four hot partitions");
            Assert.IsTrue(recommendation.KeepWorkersAlive);

            heartbeat = await mock.PulseAsync(simulatedWorkerCount : 5);

            recommendation = heartbeat.ScaleRecommendation;
            Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action, "No work items and only four hot partitions");
            Assert.IsTrue(recommendation.KeepWorkersAlive);
        }
        public async Task<DurableTaskTriggerMetrics> GetMetricsAsync()
        {
            DurableTaskTriggerMetrics metrics = new DurableTaskTriggerMetrics();

            // Durable stores its own metrics, so we just collect them here
            PerformanceHeartbeat heartbeat = null;

            try
            {
                DisconnectedPerformanceMonitor performanceMonitor = this.GetPerformanceMonitor();
                heartbeat = await performanceMonitor.PulseAsync();
            }
            catch (StorageException e)
            {
                this.traceHelper.ExtensionWarningEvent(this.hubName, this.functionName.Name, string.Empty, e.ToString());
            }

            if (heartbeat != null)
            {
                metrics.PartitionCount        = heartbeat.PartitionCount;
                metrics.ControlQueueLengths   = JsonConvert.SerializeObject(heartbeat.ControlQueueLengths);
                metrics.ControlQueueLatencies = JsonConvert.SerializeObject(heartbeat.ControlQueueLatencies);
                metrics.WorkItemQueueLength   = heartbeat.WorkItemQueueLength;
                if (heartbeat.WorkItemQueueLatency != null)
                {
                    metrics.WorkItemQueueLatency = heartbeat.WorkItemQueueLatency.ToString();
                }
            }

            return metrics;
        }
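GetMetricsAsync flattens the per-partition queue lengths and latencies into JSON strings, and the GetScaleStatusCore method shown further down parses them back into typed collections. Below is a minimal round-trip sketch using Json.NET, matching the string formats visible in the DurableTaskTriggerMetrics samples later in this listing (e.g. "[1,2,3,4]" and "00:00:00.0010000").

        using System;
        using System.Collections.Generic;
        using Newtonsoft.Json;

        // Serialize control-queue latencies the way GetMetricsAsync does.
        var latencies = new List<TimeSpan>
        {
            TimeSpan.FromMilliseconds(1),
            TimeSpan.FromMilliseconds(2),
        };

        // Json.NET writes TimeSpan values in the constant ("c") format,
        // producing ["00:00:00.0010000","00:00:00.0020000"].
        string json = JsonConvert.SerializeObject(latencies);

        // Parse them back the way GetScaleStatusCore does.
        IReadOnlyList<TimeSpan> parsed = JsonConvert.DeserializeObject<IReadOnlyList<TimeSpan>>(json);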
Example #4
        public async Task ScaleDecision_ControlQueueLatency_NotMaxPollingDelay()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 10, 10, 10 });
            mock.AddLatencies(0, new[] { 9999, 9999, 9999, 9999 });

            // The queue was not idle, so the high-latency threshold applies rather than the max polling delay.
            for (int simulatedWorkerCount = 1; simulatedWorkerCount < 10; simulatedWorkerCount++)
            {
                PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount);

                ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
                Assert.IsTrue(recommendation.KeepWorkersAlive);

                if (simulatedWorkerCount < 3)
                {
                    Assert.AreEqual(ScaleAction.AddWorker, recommendation.Action);
                }
                else if (simulatedWorkerCount <= 4)
                {
                    Assert.AreEqual(ScaleAction.None, recommendation.Action);
                }
                else
                {
                    Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action);
                }
            }
        }
Example #5
        public async Task ScaleDecision_ControlQueueLatency_QuickDrain()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });
            mock.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });
            mock.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });
            mock.AddLatencies(0, new[] { 30000, 30000, 30000, 30000 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });

            // Something happened and the control queues drained immediately.
            for (int simulatedWorkerCount = 1; simulatedWorkerCount < 10; simulatedWorkerCount++)
            {
                PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount);

                ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
                Assert.IsTrue(recommendation.KeepWorkersAlive);

                if (simulatedWorkerCount > 4)
                {
                    Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action);
                }
                else
                {
                    Assert.AreEqual(ScaleAction.None, recommendation.Action);
                }
            }
        }
        private static void SetScaleRecommendation(PerformanceHeartbeat performanceHeartbeat,
                                                   ScaleRecommendation scaleRecommendation)
        {
            // The ScaleRecommendation property cannot be assigned directly from test code,
            // so the fixture injects a canned recommendation via reflection.
            var t    = typeof(PerformanceHeartbeat);
            var prop = t.GetProperty("ScaleRecommendation");

            prop.SetValue(performanceHeartbeat, scaleRecommendation);
        }
Example #7
        public void GetScaleStatus_HandlesMalformedMetrics()
        {
            // Null metrics
            var context = new ScaleStatusContext<DurableTaskTriggerMetrics>
            {
                WorkerCount = 1,
                Metrics     = null,
            };

            var recommendation = this.scaleMonitor.GetScaleStatus(context);

            Assert.Equal(ScaleVote.None, recommendation.Vote);

            // Empty metrics
            var heartbeats = new PerformanceHeartbeat[0];

            context.Metrics = new DurableTaskTriggerMetrics[0];

            this.performanceMonitor
                .Setup(m => m.MakeScaleRecommendation(1, heartbeats))
                .Returns<ScaleRecommendation>(null);

            recommendation = this.scaleMonitor.GetScaleStatus(context);

            Assert.Equal(ScaleVote.None, recommendation.Vote);

            // Metrics with null properties
            var metrics = new DurableTaskTriggerMetrics[5];

            for (int i = 0; i < metrics.Length; ++i)
            {
                metrics[i] = new DurableTaskTriggerMetrics();
            }

            context.Metrics = metrics;

            heartbeats = new PerformanceHeartbeat[5];
            for (int i = 0; i < heartbeats.Length; ++i)
            {
                heartbeats[i] = new PerformanceHeartbeat
                {
                    ControlQueueLengths   = new List<int>(),
                    ControlQueueLatencies = new List<TimeSpan>(),
                };
            }

            this.performanceMonitor
                .Setup(m => m.MakeScaleRecommendation(1, this.MatchEquivalentHeartbeats(heartbeats)))
                .Returns<ScaleRecommendation>(null);

            recommendation = this.scaleMonitor.GetScaleStatus(context);

            Assert.Equal(ScaleVote.None, recommendation.Vote);
        }
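The MatchEquivalentHeartbeats helper used in the Moq setups above is not included in these examples. Presumably it builds a Moq argument matcher that compares heartbeat arrays by value rather than by reference. The sketch below is hypothetical (the name and the exact comparison are assumptions), showing one way such a matcher could look.

        using System.Linq;
        using Moq;

        static class HeartbeatMatchers
        {
            // Hypothetical sketch only, not the real helper: accepts any heartbeat array
            // whose elements match the expected ones on a few key properties.
            public static PerformanceHeartbeat[] MatchEquivalent(PerformanceHeartbeat[] expected)
            {
                return It.Is<PerformanceHeartbeat[]>(actual =>
                    actual.Length == expected.Length &&
                    actual.Zip(expected, (a, e) =>
                            a.PartitionCount == e.PartitionCount &&
                            a.WorkItemQueueLength == e.WorkItemQueueLength &&
                            a.WorkItemQueueLatency == e.WorkItemQueueLatency)
                          .All(match => match));
            }
        }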
Example #8
        public async Task MonitorIdleTaskHubDisconnected()
        {
            var settings = new AzureStorageOrchestrationServiceSettings
            {
                StorageConnectionString = TestHelpers.GetTestStorageAccountConnectionString(),
                TaskHubName             = nameof(MonitorIdleTaskHubDisconnected),
                PartitionCount          = 4,
            };

            var service = new AzureStorageOrchestrationService(settings);
            var monitor = new DisconnectedPerformanceMonitor(settings.StorageConnectionString, settings.TaskHubName);

            await service.DeleteAsync();

            // A null heartbeat is expected when the task hub does not exist.
            PerformanceHeartbeat heartbeat = await monitor.PulseAsync(currentWorkerCount : 0);

            Assert.IsNull(heartbeat);

            await service.CreateAsync();

            ScaleRecommendation recommendation;

            for (int i = 0; i < 10; i++)
            {
                heartbeat = await monitor.PulseAsync(currentWorkerCount : 0);

                Assert.IsNotNull(heartbeat);
                Assert.AreEqual(settings.PartitionCount, heartbeat.PartitionCount);
                Assert.AreEqual(settings.PartitionCount, heartbeat.ControlQueueLengths.Count);
                Assert.AreEqual(settings.PartitionCount, heartbeat.ControlQueueLatencies.Count);
                Assert.AreEqual(0, heartbeat.ControlQueueLengths.Count(l => l != 0));
                Assert.AreEqual(0, heartbeat.ControlQueueLatencies.Count(l => l != TimeSpan.Zero));
                Assert.AreEqual(0, heartbeat.WorkItemQueueLength);
                Assert.AreEqual(0.0, heartbeat.WorkItemQueueLatencyTrend);
                Assert.AreEqual(TimeSpan.Zero, heartbeat.WorkItemQueueLatency);

                recommendation = heartbeat.ScaleRecommendation;
                Assert.IsNotNull(recommendation);
                Assert.AreEqual(ScaleAction.None, recommendation.Action);
                Assert.AreEqual(false, recommendation.KeepWorkersAlive);
                Assert.IsNotNull(recommendation.Reason);
            }

            // If any workers are assigned, the recommendation should be to have them removed.
            heartbeat = await monitor.PulseAsync(currentWorkerCount : 1);

            recommendation = heartbeat.ScaleRecommendation;
            Assert.IsNotNull(recommendation);
            Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action);
            Assert.AreEqual(false, recommendation.KeepWorkersAlive);
            Assert.IsNotNull(recommendation.Reason);
        }
            public ServiceFixture(ScaleAction action, int currentWorkerCount, bool inputKeepWorkersAlive = true)
            {
                _kubernetesRepositoryMock         = new Mock<IKubernetesRepository>();
                _performanceMonitorRepositoryMock = new Mock<IPerformanceMonitorRepository>();

                var performanceHeartbeat = new PerformanceHeartbeat();
                var scaleRecommendation  = CreateScaleRecommendation(action, inputKeepWorkersAlive, "baz");

                SetScaleRecommendation(performanceHeartbeat, scaleRecommendation);
                _performanceMonitorRepositoryMock.Setup(p => p.PulseAsync(currentWorkerCount)).ReturnsAsync(performanceHeartbeat);
                _kubernetesRepositoryMock.Setup(p => p.GetNumberOfPodAsync(ExpectedName, ExpectedNamespace)).ReturnsAsync(currentWorkerCount);
                _loggerMock = new Mock<ILogger<ExternalScalerService>>();
            }
Example #10
            public override async Task<PerformanceHeartbeat> PulseAsync(int simulatedWorkerCount)
            {
                Trace.TraceInformation(
                    "PULSE INPUT: Worker count: {0}; work items: {1}; control items: {2}  {3}.",
                    simulatedWorkerCount,
                    this.WorkItemQueueLatencies,
                    Environment.NewLine,
                    string.Join(Environment.NewLine + "  ", this.ControlQueueLatencies));

                PerformanceHeartbeat heartbeat = await base.PulseAsync(simulatedWorkerCount);

                Trace.TraceInformation($"PULSE OUTPUT: {heartbeat}");
                return heartbeat;
            }
Example #11
        private void GetCorrespondingHeartbeatsAndMetrics(out PerformanceHeartbeat[] heartbeats, out DurableTaskTriggerMetrics[] metrics)
        {
            heartbeats = new PerformanceHeartbeat[]
            {
                new PerformanceHeartbeat
                {
                    PartitionCount      = 4,
                    ControlQueueLengths = new List<int> {
                        1, 2, 3, 4
                    },
                    ControlQueueLatencies = new List<TimeSpan> {
                        TimeSpan.FromMilliseconds(1), TimeSpan.FromMilliseconds(2), TimeSpan.FromMilliseconds(3), TimeSpan.FromMilliseconds(4),
                    },
                    WorkItemQueueLength  = 5,
                    WorkItemQueueLatency = TimeSpan.FromMilliseconds(6),
                },
                new PerformanceHeartbeat
                {
                    PartitionCount      = 7,
                    ControlQueueLengths = new List<int> {
                        8, 9, 10, 11
                    },
                    ControlQueueLatencies = new List<TimeSpan> {
                        TimeSpan.FromMilliseconds(12), TimeSpan.FromMilliseconds(13), TimeSpan.FromMilliseconds(14), TimeSpan.FromMilliseconds(15),
                    },
                    WorkItemQueueLength  = 16,
                    WorkItemQueueLatency = TimeSpan.FromMilliseconds(17),
                },
            };

            metrics = new DurableTaskTriggerMetrics[]
            {
                new DurableTaskTriggerMetrics
                {
                    PartitionCount        = 4,
                    ControlQueueLengths   = "[1,2,3,4]",
                    ControlQueueLatencies = "[\"00:00:00.0010000\",\"00:00:00.0020000\",\"00:00:00.0030000\",\"00:00:00.0040000\"]",
                    WorkItemQueueLength   = 5,
                    WorkItemQueueLatency  = "00:00:00.0060000",
                },
                new DurableTaskTriggerMetrics
                {
                    PartitionCount        = 7,
                    ControlQueueLengths   = "[8,9,10,11]",
                    ControlQueueLatencies = "[\"00:00:00.0120000\",\"00:00:00.0130000\",\"00:00:00.0140000\",\"00:00:00.0150000\"]",
                    WorkItemQueueLength   = 16,
                    WorkItemQueueLatency  = "00:00:00.0170000",
                },
            };
        }
Example #12
        public async Task ScaleDecision_WorkItemLatency_Low()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(10, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(10, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(10, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(10, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(10, new[] { 0, 0, 0, 0 });

            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.None, recommendation.Action);
            Assert.IsTrue(recommendation.KeepWorkersAlive);

            var random = new Random();

            // Scaling in on low latency is semi-random, so we need a large number of samples.
            var recommendations = new ScaleRecommendation[500];

            for (int i = 0; i < recommendations.Length; i++)
            {
                mock.AddLatencies(random.Next(50), new[] { 0, 0, 0, 0 });

                heartbeat = await mock.PulseAsync(simulatedWorkerCount : 2);

                recommendations[i] = heartbeat.ScaleRecommendation;
            }

            int scaleOutCount  = recommendations.Count(r => r.Action == ScaleAction.AddWorker);
            int scaleInCount   = recommendations.Count(r => r.Action == ScaleAction.RemoveWorker);
            int noScaleCount   = recommendations.Count(r => r.Action == ScaleAction.None);
            int keepAliveCount = recommendations.Count(r => r.KeepWorkersAlive);

            Trace.TraceInformation($"Scale-out count  : {scaleOutCount}.");
            Trace.TraceInformation($"Scale-in count   : {scaleInCount}.");
            Trace.TraceInformation($"No-scale count   : {noScaleCount}.");
            Trace.TraceInformation($"Keep-alive count : {keepAliveCount}.");

            // We expect to scale in only a small percentage of the time and never to scale out.
            Assert.AreEqual(0, scaleOutCount);
            Assert.AreNotEqual(0, scaleInCount);
            Assert.IsTrue(noScaleCount > scaleInCount, "Should have more no-scale decisions");
            Assert.IsTrue(keepAliveCount > recommendations.Length * 0.9, "Almost all should be keep-alive");
        }
Example #13
        public async Task ScaleDecision_ControlQueueLatency_MaxPollingDelay2()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 10000, 10000, 10000, 10000 });
            mock.AddLatencies(0, new[] { 100, 100, 100, 100 });

            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.None, recommendation.Action);
            Assert.IsTrue(recommendation.KeepWorkersAlive);
        }
Example #14
        public async Task ScaleDecision_WorkItemLatency_Moderate()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(500, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(600, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(700, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(800, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(900, new[] { 0, 0, 0, 0 });

            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.None, recommendation.Action);
            Assert.IsTrue(recommendation.KeepWorkersAlive);
        }
Example #15
        public async Task ScaleDecision_ControlQueueLatency_High1()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 0, 0, 0, 600 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 700 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 800 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 900 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 1000 });

            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.None, recommendation.Action, "Only one hot partition");
            Assert.IsTrue(recommendation.KeepWorkersAlive);
        }
Example #16
        public async Task ScaleDecision_ControlQueueLatency_MaxPollingDelay1()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 9999, 9999, 9999, 9999 });

            // When the queue was idle, the first non-zero latency must exceed the max polling interval before we scale out.
            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.None, recommendation.Action);
            Assert.IsTrue(recommendation.KeepWorkersAlive);
        }
Example #17
        public async Task ScaleDecision_ControlQueueLatency_Idle4()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 0, 0, 0, 1 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });

            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.RemoveWorker, recommendation.Action);
            Assert.IsFalse(recommendation.KeepWorkersAlive);
        }
Example #18
        public async Task ScaleDecision_WorkItemLatency_NotMaxPollingDelay()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(10, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(9999, new[] { 0, 0, 0, 0 });

            // The queue was not idle, so the high-latency threshold applies rather than the max polling delay.
            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.AddWorker, recommendation.Action);
            Assert.IsTrue(recommendation.KeepWorkersAlive);
        }
Example #19
        public async Task ScaleDecision_WorkItemLatency_QuickDrain()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(30000, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(30000, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(30000, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(30000, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(3, new[] { 0, 0, 0, 0 });

            // Something happened and we immediately drained the work-item queue
            PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

            ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

            Assert.AreEqual(ScaleAction.None, recommendation.Action);
            Assert.IsTrue(recommendation.KeepWorkersAlive);
        }
Example #20
        public async Task ScaleDecision_ControlQueueLatency_NotIdle()
        {
            var mock = GetFakePerformanceMonitor();

            mock.AddLatencies(0, new[] { 0, 0, 0, 1 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
            mock.AddLatencies(0, new[] { 0, 0, 0, 0 });

            for (int i = 0; i < 100; i++)
            {
                PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

                ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

                // We should never scale to zero unless all control queues are idle.
                Assert.AreEqual(ScaleAction.None, recommendation.Action);
                Assert.IsTrue(recommendation.KeepWorkersAlive);
            }
        }
Example #21
        public async Task ScaleDecision_WorkItemLatency_NotIdle()
        {
            var mock = GetFakePerformanceMonitor();

            for (int i = 0; i < 100; i++)
            {
                mock.AddLatencies(1, new[] { 0, 0, 0, 0 });
                mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
                mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
                mock.AddLatencies(0, new[] { 0, 0, 0, 0 });
                mock.AddLatencies(0, new[] { 0, 0, 0, 0 });

                PerformanceHeartbeat heartbeat = await mock.PulseAsync(simulatedWorkerCount : 1);

                ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;

                // Should never scale to zero when there was a message in a queue
                // within the last 5 samples.
                Assert.AreEqual(ScaleAction.None, recommendation.Action);
                Assert.IsTrue(recommendation.KeepWorkersAlive);
            }
        }
        private ScaleStatus GetScaleStatusCore(int workerCount, DurableTaskTriggerMetrics[] metrics)
        {
            var scaleStatus = new ScaleStatus()
            {
                Vote = ScaleVote.None
            };

            if (metrics == null)
            {
                return scaleStatus;
            }

            var heartbeats = new PerformanceHeartbeat[metrics.Length];

            for (int i = 0; i < metrics.Length; ++i)
            {
                TimeSpan workItemQueueLatency;
                bool     parseResult = TimeSpan.TryParse(metrics[i].WorkItemQueueLatency, out workItemQueueLatency);

                heartbeats[i] = new PerformanceHeartbeat()
                {
                    PartitionCount       = metrics[i].PartitionCount,
                    WorkItemQueueLatency = parseResult ? workItemQueueLatency : TimeSpan.FromMilliseconds(0),
                    WorkItemQueueLength  = metrics[i].WorkItemQueueLength,
                };

                if (metrics[i].ControlQueueLengths == null)
                {
                    heartbeats[i].ControlQueueLengths = new List<int>();
                }
                else
                {
                    heartbeats[i].ControlQueueLengths = JsonConvert.DeserializeObject<IReadOnlyList<int>>(metrics[i].ControlQueueLengths);
                }

                if (metrics[i].ControlQueueLatencies == null)
                {
                    heartbeats[i].ControlQueueLatencies = new List<TimeSpan>();
                }
                else
                {
                    heartbeats[i].ControlQueueLatencies = JsonConvert.DeserializeObject<IReadOnlyList<TimeSpan>>(metrics[i].ControlQueueLatencies);
                }
            }

            DisconnectedPerformanceMonitor performanceMonitor = this.GetPerformanceMonitor();
            var scaleRecommendation = performanceMonitor.MakeScaleRecommendation(workerCount, heartbeats.ToArray());

            bool writeToUserLogs = false;

            switch (scaleRecommendation?.Action)
            {
            case ScaleAction.AddWorker:
                scaleStatus.Vote = ScaleVote.ScaleOut;
                writeToUserLogs  = true;
                break;

            case ScaleAction.RemoveWorker:
                scaleStatus.Vote = ScaleVote.ScaleIn;
                writeToUserLogs  = true;
                break;

            default:
                scaleStatus.Vote = ScaleVote.None;
                break;
            }

            this.traceHelper.ExtensionInformationalEvent(
                this.hubName,
                string.Empty,
                this.functionName.Name,
                $"Durable Functions Trigger Scale Decision: {scaleStatus.Vote.ToString()}, Reason: {scaleRecommendation?.Reason}",
                writeToUserLogs: writeToUserLogs);

            return scaleStatus;
        }
Example #23
        public async Task MonitorIncreasingControlQueueLoadDisconnected()
        {
            var settings = new AzureStorageOrchestrationServiceSettings()
            {
                StorageConnectionString = TestHelpers.GetTestStorageAccountConnectionString(),
                TaskHubName             = nameof(MonitorIncreasingControlQueueLoadDisconnected),
                PartitionCount          = 4,
            };

            var service = new AzureStorageOrchestrationService(settings);

            var monitor = new DisconnectedPerformanceMonitor(settings.StorageConnectionString, settings.TaskHubName);
            int simulatedWorkerCount = 0;
            await service.CreateAsync();

            // With no queue data yet, the heartbeat should come back with a no-op recommendation.
            PerformanceHeartbeat heartbeat = await monitor.PulseAsync(simulatedWorkerCount);

            Assert.IsNotNull(heartbeat);
            Assert.IsNotNull(heartbeat.ScaleRecommendation);
            Assert.AreEqual(ScaleAction.None, heartbeat.ScaleRecommendation.Action);
            Assert.IsFalse(heartbeat.ScaleRecommendation.KeepWorkersAlive);

            var client = new TaskHubClient(service);
            var previousTotalLatency = TimeSpan.Zero;

            for (int i = 1; i < settings.PartitionCount + 10; i++)
            {
                await client.CreateOrchestrationInstanceAsync(typeof(NoOpOrchestration), input : null);

                heartbeat = await monitor.PulseAsync(simulatedWorkerCount);

                Assert.IsNotNull(heartbeat);

                ScaleRecommendation recommendation = heartbeat.ScaleRecommendation;
                Assert.IsNotNull(recommendation);
                Assert.IsTrue(recommendation.KeepWorkersAlive);

                Assert.AreEqual(settings.PartitionCount, heartbeat.PartitionCount);
                Assert.AreEqual(settings.PartitionCount, heartbeat.ControlQueueLengths.Count);
                Assert.AreEqual(i, heartbeat.ControlQueueLengths.Sum());
                Assert.AreEqual(0, heartbeat.WorkItemQueueLength);
                Assert.AreEqual(TimeSpan.Zero, heartbeat.WorkItemQueueLatency);

                TimeSpan currentTotalLatency = TimeSpan.FromTicks(heartbeat.ControlQueueLatencies.Sum(ts => ts.Ticks));
                Assert.IsTrue(currentTotalLatency > previousTotalLatency);

                if (i + 1 < DisconnectedPerformanceMonitor.QueueLengthSampleSize)
                {
                    int queuesWithNonZeroLatencies = heartbeat.ControlQueueLatencies.Count(t => t > TimeSpan.Zero);
                    Assert.IsTrue(queuesWithNonZeroLatencies > 0 && queuesWithNonZeroLatencies <= i);

                    int queuesWithAtLeastOneMessage = heartbeat.ControlQueueLengths.Count(l => l > 0);
                    Assert.IsTrue(queuesWithAtLeastOneMessage > 0 && queuesWithAtLeastOneMessage <= i);

                    ScaleAction expectedScaleAction = simulatedWorkerCount == 0 ? ScaleAction.AddWorker : ScaleAction.None;
                    Assert.AreEqual(expectedScaleAction, recommendation.Action);
                }
                else
                {
                    // Validate that control queue latencies are going up with each iteration.
                    Assert.IsTrue(currentTotalLatency.Ticks > previousTotalLatency.Ticks);
                    previousTotalLatency = currentTotalLatency;
                }

                Assert.AreEqual(0, heartbeat.WorkItemQueueLength);
                Assert.AreEqual(0.0, heartbeat.WorkItemQueueLatencyTrend);

                if (recommendation.Action == ScaleAction.AddWorker)
                {
                    simulatedWorkerCount++;
                }

                // The high-latency threshold is 1 second
                Thread.Sleep(TimeSpan.FromSeconds(1.1));
            }
        }