/// <summary>
/// Initializes the shared web-test environment once per test run and returns the cached context
/// on every subsequent call.
/// NOTE(review): there is no synchronization around the cached context — concurrent first calls
/// could each build their own client; confirm callers serialize initialization.
/// </summary>
/// <param name="testRunSettings">Settings that identify the target environment.</param>
/// <param name="trafficSource">Traffic source tag passed to the web client builder.</param>
/// <returns>The singleton <see cref="CloudBornWebTestRunContext"/> for this run.</returns>
public static async Task<CloudBornWebTestRunContext> InitTestEnvironment(TestRunSettings testRunSettings, TrafficSource trafficSource)
{
    // Already initialized by an earlier call — reuse it.
    if (testRunContext != null)
    {
        return testRunContext;
    }

    var connectionInfo = ConnectionInfoReader.GetConnectionInfo(testRunSettings.Environment);
    var builder = new CloudBornWebClientBuilder(connectionInfo, trafficSource);
    var client = builder.Create();

    // Block until the service answers its health probe, honoring the configured retry policy.
    await HttpClientUtils.WaitForService(
        async () => { await client.GetHealth().ConfigureAwait(false); },
        connectionInfo.RetryOnFailedConnection).ConfigureAwait(false);

    testRunContext = new CloudBornWebTestRunContext(connectionInfo, client, builder);
    return testRunContext;
}
/// <summary>
/// Captures the screenshot and file-list configuration for a test run setup.
/// </summary>
/// <param name="testRunSettings">Source of the screenshot paths and the files-list location.</param>
public SetupCreator(TestRunSettings testRunSettings)
{
    // Load the file list first so a bad path fails fast, before any other state is relied on.
    fileList = ReadFilesList(testRunSettings.FilesListPath);
    screenshotPath = testRunSettings.ScreenshotPath;
    screenshotUrl = testRunSettings.ScreenshotUrl;
    // NOTE(review): the boolean is stored as its string form ("True"/"False") —
    // confirm downstream consumers expect text rather than a bool.
    appendBuildNumberToPath = testRunSettings.AppendBuildNumberToPath.ToString();
}
/// <summary>
/// Console entry point: validates the command-line arguments, builds global and per-run
/// settings from configuration, and hands control to the <c>Manager</c>.
/// </summary>
/// <param name="args">args[0] = test assembly path; args[1] = environment name (Stable, Ref, Live).</param>
/// <returns>The exit code produced by <c>Manager.Manage</c>.</returns>
private static int Main(string[] args)
{
    XmlConfigurator.Configure();
    var log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
    log.Info("Started");

    if (args == null || args.Length == 0)
    {
        throw new ArgumentException("No arguments were provided.");
    }

    if (string.IsNullOrEmpty(args[0]))
    {
        throw new ArgumentException("Test assembly path was not set.");
    }

    // BUG FIX: the original read args[1] after only checking args.Length == 0, so invoking
    // with a single argument threw IndexOutOfRangeException instead of the intended
    // ArgumentException. Guard the length before indexing.
    if (args.Length < 2 || string.IsNullOrEmpty(args[1]))
    {
        throw new ArgumentException("Environment (Stable, Ref, Live) was not set.");
    }

    var testAssemblyPath = args[0];
    var testEnvironment = args[1];

    if (!File.Exists(testAssemblyPath))
    {
        throw new ArgumentException(string.Format("Given test assembly path is not available or does not exist: {0}.", testAssemblyPath));
    }

    // Case-insensitive parse; Enum.Parse throws ArgumentException for unknown names,
    // consistent with the validation above.
    var environment = (TestEnvironment)Enum.Parse(typeof(TestEnvironment), testEnvironment, true);

    var settings = ConfigReader.Settings();

    // Object initializers replace the original field-by-field assignment blocks.
    var globalSettings = new GlobalSettings
    {
        DeliverySystemCmd = settings.ContinuousIntegrationSystemCmd,
        MachineConfigs = settings.MachineConfigs,
        NoOfParallelProcesses = settings.NoOfParallelRun,
        ReRunFailedTests = settings.ReRunFailedTests,
        ReRunFailedTestsLimit = settings.ReRunFailedTestsLimit,
        SeleniumNodes = settings.SeleniumClients,
        TestEnvironment = environment,
        NUnitArgs = settings.NUnitArgs,
    };

    var testRunSettings = new TestRunSettings
    {
        AppendBuildNumberToPath = settings.AppendBuildNumberToPath,
        FilesListPath = @"fileList.txt",
        ScreenshotPath = settings.ScreenshotLocation,
        ScreenshotUrl = settings.ScreenshotUrl,
    };

    var manager = new Manager(testRunSettings, globalSettings);
    return manager.Manage(testAssemblyPath, environment);
}
/// <summary>
/// Wires up the collaborators needed to drive a distributed test session:
/// host-file switching, test splitting, setup creation, result merging/printing,
/// and the Selenium node manager.
/// </summary>
/// <param name="testRunSettings">Per-run settings, forwarded to the setup creator.</param>
/// <param name="globalSettings">Global run configuration (parallelism, nodes, CI command, NUnit args).</param>
public Manager(TestRunSettings testRunSettings, GlobalSettings globalSettings)
{
    this.globalSettings = globalSettings;
    nUnitArgs = globalSettings.NUnitArgs;

    hostFileSwitcher = new HostFileSwitcher();
    splitter = new Splitter();
    resultMerger = new ResultMerger();
    setupCreator = new SetupCreator(testRunSettings);
    resultPrinter = new ResultPrinter(globalSettings.DeliverySystemCmd);
    nodesManager = new NodesManager(globalSettings.NoOfParallelProcesses);

    // Register the configured Selenium nodes up front, before any distribution work begins.
    nodesManager.InitTestConfigs(globalSettings.SeleniumNodes);
}
/// <summary>
/// Ensures that an xml element corresponding to the test run settings exists in the
/// settings document; when it is missing, an element produced by the settings node
/// is imported and appended under the document root.
/// </summary>
/// <param name="settings">The settings document to inspect and update.</param>
/// <param name="settingsNode">The settings section whose element must be present.</param>
private static void EnsureSettingsNode(XmlDocument settings, TestRunSettings settingsNode)
{
    Debug.Assert(settingsNode != null, "Invalid Settings Node");
    Debug.Assert(settings != null, "Invalid Settings");

    var root = settings.DocumentElement;

    // Guard clause: nothing to do when the element already exists.
    if (root[settingsNode.Name] != null)
    {
        return;
    }

    // ImportNode is required because the element originates from a different XmlDocument.
    XmlNode imported = settings.ImportNode(settingsNode.ToXml(), true);
    root.AppendChild(imported);
}
/// <summary>
/// Runs a distributed test execution against the configured test server and reports
/// the total run duration to the console.
/// </summary>
/// <param name="runnerModeOptions">Parsed command-line options for runner mode.</param>
/// <returns>
/// 1 when the run completed successfully; 0 otherwise.
/// NOTE(review): this is inverted relative to the usual process exit-code convention —
/// preserved because callers depend on it.
/// </returns>
private static int ExecuteTestRun(RunnerModeOptions runnerModeOptions)
{
    // Fall back to a previously saved server URL when none was supplied on the command line.
    var serverUri = runnerModeOptions.TestServerUrl != null
        ? new Uri(runnerModeOptions.TestServerUrl)
        : LoadSavedTestExecutionServerUrl();
    if (serverUri == null)
    {
        Console.WriteLine("You need to specify a test server URL.");
        return 0;
    }

    DateTime startTime = DateTime.Now;
    Console.WriteLine("Test Execution started....");

    var testRunSettings = new TestRunSettings
    {
        ResultsFilePath = runnerModeOptions.ResultsFilePath,
        OutputFilesLocation = runnerModeOptions.OutputFilesLocation,
        TestsFilter = runnerModeOptions.TestsFilter,
        TestLibraryPath = runnerModeOptions.TestLibraryPath,
        AgentTag = runnerModeOptions.AgentTag,
        RunInParallel = runnerModeOptions.RunInParallel,
        MaxParallelProcessesCount = runnerModeOptions.MaxParallelProcessesCount,
        SameMachineByClass = runnerModeOptions.SameMachineByClass,
        TimeBasedBalance = runnerModeOptions.TimeBasedBalance,
        NativeArguments = runnerModeOptions.NativeArguments,
        TestRunTimeout = runnerModeOptions.TestRunTimeout,
        RetriesCount = runnerModeOptions.RetriesCount,
        Threshold = runnerModeOptions.Threshold,
        CustomArguments = runnerModeOptions.CustomArguments,
        RetriedResultsFilePath = runnerModeOptions.RetriedResultsFilePath,
        TestTechnology = runnerModeOptions.TestTechnology,
    };

    InitializeAllTypes(serverUri);

    bool wasSuccessfulRun = false;
    try
    {
        // Sync-over-async (.Result) is deliberate here: this is a blocking console entry path.
        var testExecutionService = _container.Resolve<TestExecutionService>();
        wasSuccessfulRun = testExecutionService.ExecuteAsync(testRunSettings).Result;
    }
    catch (TestRunAbortedException ex)
    {
        var exceptionLogger = _container.Resolve<IDistributeLogger>();
        exceptionLogger.LogErrorAsync(ex.Message, ex).Wait();
        Environment.Exit(0);
    }
    // BUG FIX: the original filter dereferenced ex.InnerException.InnerException without null
    // checks. An exception thrown inside a 'when' filter is silently treated as false, so real
    // connection failures whose inner-exception chain was shallower fell through to the generic
    // handler with the wrong message. Null-conditional operators make the intent explicit.
    catch (Exception ex) when (ex.InnerException?.InnerException?.Message?.Contains("A connection with the server could not be established") == true)
    {
        Console.WriteLine($"A connection with the server {runnerModeOptions.TestServerUrl} could not be established.");
        Environment.Exit(-1);
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        var exceptionLogger = _container.Resolve<IDistributeLogger>();
        exceptionLogger.LogErrorAsync(e.Message, e).Wait();
        Environment.Exit(-1);
    }

    DateTime endTime = DateTime.Now;
    TimeSpan completionTime = endTime - startTime;
    Console.WriteLine($"Test Run Completed for {(int)completionTime.TotalMinutes} minutes and {completionTime.Seconds:00} seconds");

    return wasSuccessfulRun ? 1 : 0;
}
/// <summary>
/// Orchestrates a full distributed test run: validates the results path, runs pre-run plugin
/// hooks, cleans old run data, verifies which agents are alive, ships the zipped test payload,
/// distributes test cases across agents, waits for completion (aborting on timeout), and
/// persists results when no agent run was aborted.
/// </summary>
/// <param name="testRunSettings">Run configuration: paths, filter, agent tag, timeouts, parallelism, retries.</param>
/// <returns>True when the run completed with no aborted test agent runs; otherwise false.</returns>
/// <exception cref="ArgumentException">Thrown when the results file path is not valid.</exception>
public async Task <bool> ExecuteAsync(TestRunSettings testRunSettings)
{
    if (!_pathProvider.IsFilePathValid(testRunSettings.ResultsFilePath))
    {
        throw new ArgumentException($"The specified test results file path is not valid. Specified path = {testRunSettings.ResultsFilePath}");
    }

    // Give every registered test-runner plugin its pre-run hook.
    _pluginService.ExecuteAllTestRunnerPluginsPreTestRunLogic();

    // Clear stale data from previous runs before creating a new one.
    await _testRunsCleanerServiceClient.DeleteOldTestRunsDataAsync();
    await _testCasesHistoryService.DeleteOlderTestCasesHistoryAsync();

    // Ask all agents matching the tag to verify their status, then re-query so that
    // only agents that actually responded are used for distribution.
    var activeTestAgents = await _testAgentService.GetAllActiveTestAgentsByTagAsync(testRunSettings.AgentTag);
    await _testAgentService.SetAllActiveAgentsToVerifyTheirStatusAsync(testRunSettings.AgentTag);
    await _testAgentService.WaitAllActiveAgentsToVerifyTheirStatusAsync(activeTestAgents);
    var availableTestAgents = await _testAgentService.GetAllActiveTestAgentsByTagAsync(testRunSettings.AgentTag);

    bool wasSuccessfulRun = false;
    if (availableTestAgents.Count > 0)
    {
        // Zip the output folder so it can be shipped to the agents as a single payload.
        // The temp file is deleted first because GetTempFileName may return an existing file.
        var tempFilePath = _pathProvider.GetTempFileName();
        _fileProvider.Delete(tempFilePath);
        _fileProvider.CreateZip(testRunSettings.OutputFilesLocation, tempFilePath);
        var zipData = _fileProvider.ReadAllBytes(tempFilePath);

        // Register the new run (with its payload and retry/parallelism options) on the server.
        var testRunId = await _testRunProvider.CreateNewTestRunAsync(
            _pathProvider.GetFileName(testRunSettings.TestLibraryPath),
            zipData,
            testRunSettings.RetriesCount,
            testRunSettings.Threshold,
            testRunSettings.RunInParallel,
            testRunSettings.MaxParallelProcessesCount,
            testRunSettings.NativeArguments,
            testRunSettings.TestTechnology,
            testRunSettings.TimeBasedBalance,
            testRunSettings.CustomArguments);

        // Resolve the technology-specific plugin that knows how to enumerate test cases,
        // then extract and filter the cases to run.
        _testCasesProvider = _pluginService.GetNativeTestsRunnerTestCasesPluginService(testRunSettings.TestTechnology);
        var allTestCases = _testCasesProvider.ExtractAllTestCasesFromTestLibrary(testRunSettings.TestLibraryPath);
        var filteredTestCases = _testCasesFilterService.FilterCases(allTestCases, testRunSettings.TestsFilter);

        // Choose the distribution strategy: time-based balancing (async service) or
        // simple count-based splitting across the available agents.
        var distributedTestsLists = testRunSettings.TimeBasedBalance ?
            await _testsTimesBasedDistributeService.GenerateDistributionListsAsync(availableTestAgents.Count, filteredTestCases) :
            _testCountsBasedDistributeService.GenerateDistributionLists(availableTestAgents.Count, filteredTestCases);

        var testAgentRuns = await _testAgentRunProvider.CreateNewTestAgentRunsAsync(testRunId, availableTestAgents, distributedTestsLists);
        try
        {
            // TODO: pass ExecutionFrequency from args console?
            await _testAgentRunProvider.WaitForTestAgentRunsToFinishAsync(testAgentRuns, testRunSettings.TestRunTimeout, ExecutionFrequency);

            // DEBUG:
            ////_consoleProvider.WriteLine("AFTER WaitForTestAgentRunsToFinishAsync");
            _consoleProvider.WriteLine(TestAgentRunsHasFinished);
        }
        catch (TimeoutException)
        {
            // The run exceeded its time budget — abort everything still in flight.
            _consoleProvider.WriteLine(string.Format(TestRunHasTimedOut, testRunSettings.TestRunTimeout));
            await _testAgentRunProvider.AbortAllTestAgentRunsInTestRunAsync(testRunId);
        }

        // Success only when no agent run ended up aborted (including after a timeout above).
        var areThereAbortedTestAgentRuns = await _testAgentRunProvider.AreThereAbortedTestAgentRunsAsync(testRunId);
        if (!areThereAbortedTestAgentRuns)
        {
            // DEBUG:
            ////_consoleProvider.WriteLine("START COMPLEETING TEST RUN");
            await _testRunProvider.CompleteTestRunAsync(testRunId, TestRunStatus.Completed);
            wasSuccessfulRun = true;

            // DEBUG:
            _consoleProvider.WriteLine("TEST RUN COMPLETED");

            await _testResultsService.SaveTestResultsForCurrentRunAsync(testRunSettings.TestTechnology, testRunSettings.ResultsFilePath, testRunSettings.RetriedResultsFilePath, testRunId);
            try
            {
                await _testRunsCleanerServiceClient.DeleteOldTestRunDataByTestRunIdAsync(testRunId);
            }
            catch (Exception e)
            {
                // Best-effort cleanup — a failure here must not fail an otherwise successful run.
                _consoleProvider.WriteLine(e.ToString());
            }
        }
        else
        {
            _consoleProvider.WriteLine("Test Run Aborted!");
            await _testRunProvider.CompleteTestRunAsync(testRunId, TestRunStatus.Aborted);
        }
    }
    else
    {
        _consoleProvider.WriteLine(NoTestAgentsAreAvailable);
    }

    // Post-run plugin hooks run for both outcomes (as long as no exception escaped above).
    _pluginService.ExecuteAllTestRunnerPluginsPostTestRunLogic();
    return(wasSuccessfulRun);
}