Example #1
        public async Task<bool> ExecuteAsync(TestRunSettings testRunSettings)
        {
            if (!_pathProvider.IsFilePathValid(testRunSettings.ResultsFilePath))
            {
                throw new ArgumentException($"The specified test results file path is not valid. Specified path = {testRunSettings.ResultsFilePath}");
            }

            _pluginService.ExecuteAllTestRunnerPluginsPreTestRunLogic();

            await _testRunsCleanerServiceClient.DeleteOldTestRunsDataAsync();

            await _testCasesHistoryService.DeleteOlderTestCasesHistoryAsync();

            var activeTestAgents = await _testAgentService.GetAllActiveTestAgentsByTagAsync(testRunSettings.AgentTag);

            await _testAgentService.SetAllActiveAgentsToVerifyTheirStatusAsync(testRunSettings.AgentTag);

            await _testAgentService.WaitAllActiveAgentsToVerifyTheirStatusAsync(activeTestAgents);

            var availableTestAgents = await _testAgentService.GetAllActiveTestAgentsByTagAsync(testRunSettings.AgentTag);

            bool wasSuccessfulRun = false;

            if (availableTestAgents.Count > 0)
            {
                var tempFilePath = _pathProvider.GetTempFileName();
                _fileProvider.Delete(tempFilePath);
                _fileProvider.CreateZip(testRunSettings.OutputFilesLocation, tempFilePath);
                var zipData = _fileProvider.ReadAllBytes(tempFilePath);

                var testRunId = await _testRunProvider.CreateNewTestRunAsync(
                    _pathProvider.GetFileName(testRunSettings.TestLibraryPath),
                    zipData,
                    testRunSettings.RetriesCount,
                    testRunSettings.Threshold,
                    testRunSettings.RunInParallel,
                    testRunSettings.MaxParallelProcessesCount,
                    testRunSettings.NativeArguments,
                    testRunSettings.TestTechnology,
                    testRunSettings.TimeBasedBalance,
                    testRunSettings.CustomArguments);

                _testCasesProvider = _pluginService.GetNativeTestsRunnerTestCasesPluginService(testRunSettings.TestTechnology);
                var allTestCases      = _testCasesProvider.ExtractAllTestCasesFromTestLibrary(testRunSettings.TestLibraryPath);
                var filteredTestCases = _testCasesFilterService.FilterCases(allTestCases, testRunSettings.TestsFilter);

                var distributedTestsLists = testRunSettings.TimeBasedBalance ?
                                            await _testsTimesBasedDistributeService.GenerateDistributionListsAsync(availableTestAgents.Count, filteredTestCases) :
                                            _testCountsBasedDistributeService.GenerateDistributionLists(availableTestAgents.Count, filteredTestCases);

                var testAgentRuns = await _testAgentRunProvider.CreateNewTestAgentRunsAsync(testRunId, availableTestAgents, distributedTestsLists);

                try
                {
                    // TODO: pass ExecutionFrequency from args console?
                    await _testAgentRunProvider.WaitForTestAgentRunsToFinishAsync(testAgentRuns, testRunSettings.TestRunTimeout, ExecutionFrequency);

                    _consoleProvider.WriteLine(TestAgentRunsHasFinished);
                }
                catch (TimeoutException)
                {
                    _consoleProvider.WriteLine(string.Format(TestRunHasTimedOut, testRunSettings.TestRunTimeout));
                    await _testAgentRunProvider.AbortAllTestAgentRunsInTestRunAsync(testRunId);
                }

                var areThereAbortedTestAgentRuns = await _testAgentRunProvider.AreThereAbortedTestAgentRunsAsync(testRunId);

                if (!areThereAbortedTestAgentRuns)
                {
                    await _testRunProvider.CompleteTestRunAsync(testRunId, TestRunStatus.Completed);

                    wasSuccessfulRun = true;

                    _consoleProvider.WriteLine("TEST RUN COMPLETED");
                    await _testResultsService.SaveTestResultsForCurrentRunAsync(testRunSettings.TestTechnology, testRunSettings.ResultsFilePath, testRunSettings.RetriedResultsFilePath, testRunId);

                    try
                    {
                        await _testRunsCleanerServiceClient.DeleteOldTestRunDataByTestRunIdAsync(testRunId);
                    }
                    catch (Exception e)
                    {
                        _consoleProvider.WriteLine(e.ToString());
                    }
                }
                else
                {
                    _consoleProvider.WriteLine("Test Run Aborted!");
                    await _testRunProvider.CompleteTestRunAsync(testRunId, TestRunStatus.Aborted);
                }
            }
            else
            {
                _consoleProvider.WriteLine(NoTestAgentsAreAvailable);
            }

            _pluginService.ExecuteAllTestRunnerPluginsPostTestRunLogic();

            return wasSuccessfulRun;
        }
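
A minimal call-site sketch for ExecuteAsync, assuming the runner object (here called testRunnerEngine) is resolved elsewhere, for example from an IoC container. The property names are the ones the method reads from TestRunSettings; their types, units and the sample values below are illustrative assumptions.

// Hypothetical wiring: testRunnerEngine is the object exposing the ExecuteAsync method above.
// Property types and units (string vs. enum, seconds vs. minutes) are assumptions;
// the remaining settings (TestsFilter, NativeArguments, CustomArguments, ...) are omitted.
var testRunSettings = new TestRunSettings
{
    ResultsFilePath = @"C:\Results\TestResults.trx",
    OutputFilesLocation = @"C:\TestsOutput",
    TestLibraryPath = @"C:\TestsOutput\MyTests.dll",
    AgentTag = "UI-Agents",
    TestTechnology = "MSTest",
    RetriesCount = 2,
    RunInParallel = false,
    MaxParallelProcessesCount = 1,
    TimeBasedBalance = true,
    TestRunTimeout = 30
};

bool wasSuccessfulRun = await testRunnerEngine.ExecuteAsync(testRunSettings);
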
Example #2
        public async Task<List<string>> GenerateDistributionListsAsync(int testAgentsCount, bool sameMachineByClass, List<TestCase> testCasesToBeDistributed)
        {
            if (testAgentsCount <= 0)
            {
                throw new ArgumentException("Test Agents Count Must be Greater Than 0.");
            }

            var executedTestCases = await _testCasesHistoryService.GetExecutedTestCasesAsync(testCasesToBeDistributed);

            var executedTestCasesWithTime    = executedTestCases.Where(x => x.WasExecuted.Equals(true) && x.AvgExecutionTime != null);
            var executedTestCasesWithoutTime = executedTestCases.Where(x => x.WasExecuted.Equals(false) && x.AvgExecutionTime == null);

            // Balance tests by execution time if at least FallBackPercentage of them have been executed at least once; otherwise fall back to distribution by count.
            double executedTestsPercentage = ((double)executedTestCasesWithTime.Count() / testCasesToBeDistributed.Count()) * 100;

            _consoleProvider.WriteLine($"---> ExecutedTestsPercentage = {executedTestsPercentage}");
            if (executedTestsPercentage < FallBackPercentage)
            {
                _consoleProvider.WriteLine("---> Distribute tests using ITestsCountsBasedDistributeService");
                return _testsCountsBasedDistributeService.GenerateDistributionLists(testAgentsCount, sameMachineByClass, testCasesToBeDistributed);
            }

            if (executedTestCasesWithoutTime.Any() && sameMachineByClass)
            {
                return _testsCountsBasedDistributeService.GenerateDistributionLists(testAgentsCount, sameMachineByClass, testCasesToBeDistributed);
            }

            _consoleProvider.WriteLine("---> Distribute tests using ITestsTimesBasedDistributeService");
            _consoleProvider.WriteLine($"---> Total Count of Tests to be distributed = {testCasesToBeDistributed.Count}");

            var orderedByClassTestCases = executedTestCasesWithTime.OrderBy(x => x.ClassName).ToList();

            // Per-agent time budget: total average execution time in seconds, split evenly across the agents.
            double totalTestsSeconds = orderedByClassTestCases.Sum(x => x.AvgExecutionTime.Value.TotalSeconds);
            double totalSecondsPerList = Math.Ceiling(totalTestsSeconds / testAgentsCount);

            _consoleProvider.WriteLine($"---> total seconds of all tests = {totalTestsSeconds}");
            _consoleProvider.WriteLine($"---> totalSecondsPerList = {totalSecondsPerList}");
            var distributedTestCases = new List<List<TestCase>>();

            if (totalSecondsPerList > 0)
            {
                int    distributedIndex            = 0;
                double tempDistributedTestsSeconds = totalSecondsPerList;
                string previousClass = null;
                for (int i = 0; i < orderedByClassTestCases.Count; i++)
                {
                    bool shouldResetTestsPerList = ShouldResetTestsPerList(sameMachineByClass, orderedByClassTestCases[i].ClassName, previousClass);
                    if (tempDistributedTestsSeconds <= 0 && shouldResetTestsPerList)
                    {
                        tempDistributedTestsSeconds = totalSecondsPerList;
                        distributedIndex++;
                    }

                    if (tempDistributedTestsSeconds.Equals(totalSecondsPerList))
                    {
                        distributedTestCases.Add(new List<TestCase>());
                    }

                    distributedTestCases[distributedIndex].Add(orderedByClassTestCases[i]);
                    previousClass = orderedByClassTestCases[i].ClassName;
                    tempDistributedTestsSeconds -= orderedByClassTestCases[i].AvgExecutionTime.Value.TotalSeconds;
                }
            }
            else
            {
                distributedTestCases.Add(testCasesToBeDistributed);
            }

            var notDistributedTestCases = new List<List<TestCase>>();

            if (executedTestCasesWithoutTime.Any())
            {
                var notExecutedTestCases = GetTestCasesFromExecutedTestCases(executedTestCasesWithoutTime.ToList());
                notDistributedTestCases = _testsCountsBasedDistributeService.GenerateDistributionTestCasesLists(testAgentsCount, sameMachineByClass, notExecutedTestCases);
            }

            _consoleProvider.WriteLine($"---> Total Count of Tests to be NOT distributed test lists = {notDistributedTestCases.Count}");
            _consoleProvider.WriteLine($"---> Total Count of Tests to be distributed test lists = {distributedTestCases.Count}");

            // Merge time based and counts based lists.
            var distributedTestsLists = new List<string>();

            for (int i = 0; i < testAgentsCount; i++)
            {
                // The time-based pass can produce fewer lists than agents, so guard the index.
                if (i >= distributedTestCases.Count)
                {
                    break;
                }

                var mergedTestCasesList = distributedTestCases[i];

                if (notDistributedTestCases.Count > i && notDistributedTestCases[i] != null && notDistributedTestCases[i].Any())
                {
                    _consoleProvider.WriteLine($"---> NOT distributed list number = {i} added");
                    mergedTestCasesList.AddRange(notDistributedTestCases[i]);
                }

                distributedTestsLists.Add(_jsonSerializer.Serialize(mergedTestCasesList));
            }

            return distributedTestsLists;
        }
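
To make the time-based balancing easier to follow in isolation, here is a self-contained, simplified sketch of the same greedy budgeting loop used above. It works on plain per-test durations in seconds instead of TestCase objects, drops the sameMachineByClass/class-grouping handling, and the type and method names are hypothetical.

using System;
using System.Collections.Generic;
using System.Linq;

public static class TimeBasedDistributionSketch
{
    // Greedy time-budget bucketing: each list receives tests until the shared per-list
    // budget (ceil(totalSeconds / listCount)) is spent, then the next test opens a new list.
    // Because a list only closes after accumulating at least the budget, at most listCount lists are created.
    public static List<List<double>> Distribute(IReadOnlyList<double> testSeconds, int listCount)
    {
        if (listCount <= 0)
        {
            throw new ArgumentException("List count must be greater than 0.");
        }

        var lists = new List<List<double>>();
        double budgetPerList = Math.Ceiling(testSeconds.Sum() / listCount);

        // Mirrors the original fallback: with no timing signal, keep everything in one list.
        if (budgetPerList <= 0)
        {
            return new List<List<double>> { testSeconds.ToList() };
        }

        double remainingBudget = budgetPerList;

        foreach (var seconds in testSeconds)
        {
            // The previous list has used up its budget, so the next test opens a new one.
            if (remainingBudget <= 0)
            {
                remainingBudget = budgetPerList;
            }

            if (remainingBudget.Equals(budgetPerList))
            {
                lists.Add(new List<double>());
            }

            lists[lists.Count - 1].Add(seconds);
            remainingBudget -= seconds;
        }

        return lists;
    }
}
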
Example #3
        public List<string> GenerateDistributionLists(int testAgentsCount, bool sameMachineByClass, List<TestCase> testCasesToBeDistributed)
        {
            if (testAgentsCount <= 0)
            {
                throw new ArgumentException("Test Agents Count Must be Greater Than 0.");
            }

            // This overload is synchronous, so it blocks on the async history lookup; GetAwaiter().GetResult() avoids wrapping failures in an AggregateException.
            var executedTestCases = _testCasesHistoryService.GetExecutedTestCasesAsync(testCasesToBeDistributed).GetAwaiter().GetResult();
            var executedTestCasesWithTime =
                executedTestCases.Where(x => x.WasExecuted.Equals(true) && x.AvgExecutionTime != null);
            var executedTestCasesWithoutTime =
                executedTestCases.Where(x => x.WasExecuted.Equals(false) && x.AvgExecutionTime == null);

            // Balance tests by execution time if at least FallBackPercentage of them have been executed at least once; otherwise fall back to distribution by count.
            var testCasesWithTime       = executedTestCasesWithTime.ToList();
            var executedTestsPercentage = (double)testCasesWithTime.Count / testCasesToBeDistributed.Count * 100;

            _consoleProvider.WriteLine($"## Executed  tests % = {executedTestsPercentage}");
            if (executedTestsPercentage < FallBackPercentage)
            {
                _consoleProvider.WriteLine("## Distribute tests using COUNT BASED");
                return _testsCountsBasedDistributeService.GenerateDistributionLists(testAgentsCount, sameMachineByClass, testCasesToBeDistributed);
            }

            var testCasesWithoutTime = executedTestCasesWithoutTime.ToList();

            if (testCasesWithoutTime.Any() && sameMachineByClass)
            {
                return _testsCountsBasedDistributeService.GenerateDistributionLists(testAgentsCount, sameMachineByClass, testCasesToBeDistributed);
            }

            _consoleProvider.WriteLine("## Distribute tests using TIME BASED");
            _consoleProvider.WriteLine($"## Total Count of tests to be distributed = {testCasesToBeDistributed.Count}");

            var orderedByClassTestCases = testCasesWithTime.OrderBy(x => x.ClassName).ToList();

            // Per-agent time budget: total average execution time in seconds, split evenly across the agents.
            var totalTestsSeconds = orderedByClassTestCases.Sum(x => x.AvgExecutionTime?.TotalSeconds ?? 0);
            var totalSecondsPerList = Math.Ceiling(totalTestsSeconds / testAgentsCount);

            _consoleProvider.WriteLine($"## Total seconds of all tests = {totalTestsSeconds}");
            _consoleProvider.WriteLine($"## Total seconds per list = {totalSecondsPerList}");

            var distributedTestCases = new List<List<TestCase>>();

            if (totalSecondsPerList > 0)
            {
                var    distributedIndex            = 0;
                var    tempDistributedTestsSeconds = totalSecondsPerList;
                string previousClass = null;
                bool   isListReset   = false;
                for (var i = 0; i < orderedByClassTestCases.Count; i++)
                {
                    bool shouldResetTestsPerList = ShouldResetTestsPerList(sameMachineByClass, orderedByClassTestCases[i].ClassName, previousClass);
                    if (!isListReset && shouldResetTestsPerList)
                    {
                        isListReset = true;
                    }

                    if (tempDistributedTestsSeconds <= 0 && isListReset)
                    {
                        tempDistributedTestsSeconds = totalSecondsPerList;
                        distributedIndex++;
                        isListReset = false;
                    }

                    if (tempDistributedTestsSeconds.Equals(totalSecondsPerList))
                    {
                        distributedTestCases.Add(new List<TestCase>());
                    }

                    distributedTestCases[distributedIndex].Add(orderedByClassTestCases[i]);
                    previousClass = orderedByClassTestCases[i].ClassName;
                    var avgExecutionTime = orderedByClassTestCases[i].AvgExecutionTime;
                    if (avgExecutionTime != null)
                    {
                        tempDistributedTestsSeconds -= avgExecutionTime.Value.TotalSeconds;
                    }
                }
            }
            else
            {
                distributedTestCases.Add(testCasesToBeDistributed);
            }

            var notDistributedTestCases = new List <List <TestCase> >();

            if (testCasesWithoutTime.Any())
            {
                var notExecutedTestCases = GetTestCasesFromExecutedTestCases(testCasesWithoutTime.ToList());
                notDistributedTestCases = _testsCountsBasedDistributeService.GenerateDistributionTestCasesLists(testAgentsCount, sameMachineByClass, notExecutedTestCases);
            }

            _consoleProvider.WriteLine($"## Total count of tests NOT distributed = {notDistributedTestCases.Count}");
            _consoleProvider.WriteLine($"## Total count of tests to be distributed = {distributedTestCases.Count}");

            // Merge time based and counts based lists.
            var distributedTestsLists = new List<string>();

            for (var i = 0; i < testAgentsCount; i++)
            {
                // The time-based pass can produce fewer lists than agents, so stop once they run out.
                if (distributedTestCases.Count == i)
                {
                    break;
                }

                var mergedTestCasesList = distributedTestCases[i];

                if (notDistributedTestCases.Count > i && notDistributedTestCases[i] != null &&
                    notDistributedTestCases[i].Any())
                {
                    mergedTestCasesList.AddRange(notDistributedTestCases[i]);
                }

                distributedTestsLists.Add(_jsonSerializer.Serialize(mergedTestCasesList));
            }

            return distributedTestsLists;
        }
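
To make the budget arithmetic concrete, here is a small worked run of the simplified Distribute sketch shown after Example #2; the durations are made-up numbers, not real test timings.

// Five tests with average durations of 12, 8, 7, 5 and 3 seconds, split across 2 agents.
// Total = 35 s, so the per-list budget is Math.Ceiling(35 / 2.0) = 18 s.
var lists = TimeBasedDistributionSketch.Distribute(new[] { 12.0, 8, 7, 5, 3 }, 2);

// lists[0] = { 12, 8 }   -> 20 s (the first list closes once its 18 s budget is spent)
// lists[1] = { 7, 5, 3 } -> 15 s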