/// <summary>
/// Persists the currently-failed tests to <paramref name="filename"/> as a
/// TestGroup configuration so the failed subset can be re-run later.
/// </summary>
/// <param name="failedTests">ArrayList of ParallelTest entries that failed.</param>
/// <param name="filename">Destination configuration file path.</param>
private static void WriteFailedGroup(ArrayList failedTests, string filename)
{
    TestGroup failedGroup = new TestGroup
    {
        // ArrayList is untyped; materialize it as a strongly-typed array.
        ParallelTests = (ParallelTest[])failedTests.ToArray(typeof(ParallelTest))
    };

    TestConfLoader.WriteToFile(failedGroup, filename);
}
/// <summary>
/// Writes the given list of tests to <paramref name="filename"/> wrapped in a
/// TestGroup, so the subset (failed or passed) can be replayed later.
/// </summary>
/// <param name="failedTests">Tests to serialize.</param>
/// <param name="filename">Destination configuration file path.</param>
void WriteGroup(List<ParallelTest> failedTests, string filename)
{
    TestGroup subset = new TestGroup();
    subset.ParallelTests = failedTests;

    TestConfLoader.WriteToFile(subset, filename);
}
/// <summary>
/// Launcher entry point: loads a test configuration file, runs each parallel
/// test sequentially (retrying up to MAX_TEST_RETRY when a retry is requested),
/// records failed groups to disk, and prints a per-group and global summary.
/// Usage: launcher configfile [--result=filename] [--failed=filename]
/// </summary>
static void Main(string[] args)
{
    string resultfile = null;
    string failedfile = null;
    try
    {
        // Load the test configuration file
        if (args.Length == 0)
        {
            Console.WriteLine("Usage: launcher configfile [--result=filename] [--failed=filename]");
            return;
        }

        string configfile = args[0];
        mTestPath = Path.GetDirectoryName(configfile);
        TestGroup group = TestConfLoader.LoadFromFile(configfile);

        // Default location for the failed-tests file, next to the config file.
        failedfile = Path.Combine(mTestPath, "smokefailed.conf");

        if (args.Length > 1)
        {
            foreach (string arg in args)
            {
                if (arg.StartsWith("--result="))
                {
                    resultfile = arg.Substring(9);
                    resultfile = Path.GetFullPath(resultfile);
                }
                if (arg.StartsWith("--failed="))
                {
                    failedfile = arg.Substring(9);
                    failedfile = Path.GetFullPath(failedfile);
                }
            }
        }

        if ((group == null) || (group.ParallelTests.Length == 0))
        {
            Console.WriteLine("No tests to run");
            return;
        }

        ConfigureLogging();
        ConfigureRemoting();

        ArrayList failedGroups = new ArrayList();

        // Each parallel test is launched sequencially...
        Runner[] runners = new Runner[group.ParallelTests.Length];
        int i = 0;
        DateTime beginTimestamp = DateTime.Now;

        foreach (ParallelTest test in group.ParallelTests)
        {
            int retryCount = 0;
            bool bRetry = true;
            while (bRetry && retryCount < MAX_TEST_RETRY)
            {
                bRetry = false;
                log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Length);

                Runner runner = new Runner(test);
                runner.Run();
                runners[i] = runner;
                // Wait to finish
                runner.Join();

                TestResult[] runnerResults = runner.GetTestResults();
                if (runnerResults == null)
                {
                    log.Info("Error. Results are NULL");
                    // BUGFIX: the old code did "++i; continue;" here; with
                    // bRetry already false the while exited and the "++i"
                    // below ran as well, advancing i twice. That skipped a
                    // runners[] slot, leaving a null Runner that crashed the
                    // summary loop. Break and let the single ++i advance.
                    break;
                }
                if (RetryTest(runnerResults))
                {
                    bRetry = true;
                    ++retryCount;
                    log.Info("Test failed with retry option, trying again");
                    continue;
                }
                if (FailedTest(runnerResults))
                {
                    WriteFailed(runnerResults);
                    failedGroups.Add(test);
                    // Rewrite the failed file on every failure so a crash
                    // mid-run still leaves an up-to-date failed list.
                    WriteFailedGroup(failedGroups, failedfile);
                }
            }
            ++i;
        }

        DateTime endTimestamp = DateTime.Now;

        // Print the results
        double TotalBiggerTime = 0;
        int TotalTests = 0;
        int TotalExecutedTests = 0;
        int TotalFailedTests = 0;
        int TotalSuccessTests = 0;
        IList failedTests = new ArrayList();

        foreach (Runner runner in runners)
        {
            int ExecutedTests = 0;
            int FailedTests = 0;
            int SuccessTests = 0;
            double BiggerTime = 0;

            TestResult[] results = runner.GetTestResults();
            // Guard: a runner can legitimately report null results (logged
            // above); skip it instead of throwing in the summary.
            if (results == null)
                continue;

            Log(string.Format("==== Tests Results for Parallel TestGroup {0} ===", runner.TestGroupName));
            i = 0;
            foreach (TestResult res in results)
            {
                if (res.Executed)
                {
                    ++ExecutedTests;
                }
                if (res.IsFailure)
                {
                    ++FailedTests;
                }
                if (res.IsSuccess)
                {
                    ++SuccessTests;
                }

                PrintResult(++i, res);

                if (res.Time > BiggerTime)
                {
                    BiggerTime = res.Time;
                }
                if (res.IsFailure)
                {
                    failedTests.Add(res);
                }
            }

            Log("Summary:");
            // NOTE: "% Success" is an integer division by design.
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                results.Length, ExecutedTests, FailedTests, SuccessTests,
                results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
                BiggerTime));

            TotalTests += results.Length;
            TotalExecutedTests += ExecutedTests;
            TotalFailedTests += FailedTests;
            TotalSuccessTests += SuccessTests;
            TotalBiggerTime += BiggerTime;
        }

        // print all failed tests together
        if (failedTests.Count > 0)
        {
            Log("==== Failed tests ===");
            for (i = 0; i < failedTests.Count; ++i)
            {
                PrintResult(i, failedTests[i] as PNUnitTestResult);
            }
        }

        if (runners.Length > 1)
        {
            Log("Summary for all the parallel tests:");
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                TotalTests, TotalExecutedTests, TotalFailedTests, TotalSuccessTests,
                TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
                TotalBiggerTime));
        }

        TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
        Log(string.Format("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds));
    }
    finally
    {
        // Always persist whatever results we have, even on early return/throw.
        WriteResult(resultfile);
    }
}
/// <summary>
/// Loads the test group from args[0], processes the CLI options, runs the
/// tests through Launcher.RunTests, and always persists the full log plus an
/// NUnit-style XML report under customLogFolder (see the finally block).
/// </summary>
/// <param name="customLogFolder">Folder where "pnunit-results.xml" is saved.</param>
/// <param name="args">Raw command-line arguments; args[0] is the config file.</param>
static void RunLauncher(string customLogFolder, string[] args)
{
    TestGroup group = TestConfLoader.LoadFromFile(args[0], args);
    // NOTE(review): group is handed to ProcessArgs before the null check
    // below — presumably ProcessArgs tolerates a null group; confirm.
    LauncherArgs launcherArgs = CliArgsReader.ProcessArgs(args, group);

    if ((group == null) || (group.ParallelTests.Count == 0))
    {
        Console.WriteLine("No tests to run");
        return;
    }

    TestSuiteLoggerParams loggerParams = CliArgsReader.ProcessTestSuiteLoggerArgs(args);

    NUnitResultCollector nunitReport = new NUnitResultCollector();

    LogWriter logWriter = new LogWriter(launcherArgs.ResultLogFile, launcherArgs.ErrorLogFile);
    try
    {
        // Remoting endpoint: DEFAULT_LAUNCHER_PORT unless --port= is given;
        // binds all interfaces when --iptobind= is absent (empty string).
        string portValue = CliArgsReader.GetArgumentValue("--port=", args);
        int port = portValue == null ? DEFAULT_LAUNCHER_PORT : int.Parse(portValue);
        string ipToBind = CliArgsReader.GetArgumentValue("--iptobind=", args);
        Configurator.ConfigureRemoting(port, ipToBind ?? string.Empty);

        DateTime beginTimeStamp = DateTime.Now;

        TestSuiteLogger testSuiteLogger = null;
        if (loggerParams.IsInitialized())
        {
            // Optional external test-suite logger: records the build and
            // creates the suite up front so results can be attached to it.
            testSuiteLogger = new TestSuiteLogger(loggerParams);
            testSuiteLogger.SaveBuild();
            testSuiteLogger.CreateSuite();
        }

        Hashtable userValues = CliArgsReader.GetUserValues(args);

        Launcher launcher = new Launcher();

        // Address the remote agents use to call back into this launcher;
        // falls back to the machine name when no explicit IP was given.
        string listenAddress = string.Format("{0}:{1}", ipToBind ?? Environment.MachineName, port);

        // Optional explicit list of tests to run, loaded from a file.
        List <string> testList = string.IsNullOrEmpty(launcherArgs.ListTestsFile) ? null : LoadTestsToRunFromFile(launcherArgs.ListTestsFile);

        Runner[] runners = launcher.RunTests(
            group, testList, launcherArgs.MaxRetry, launcherArgs.ShellMode,
            launcherArgs.RetryOnFailure, launcherArgs.FailedConfigFile,
            testSuiteLogger, launcherArgs.TestsTimeout, launcherArgs.TestRange,
            userValues, logWriter, listenAddress, launcherArgs.UseFileReport);

        DateTime endTimeStamp = DateTime.Now;

        FillNunitReport(nunitReport, runners);

        // --skipsummarylog suppresses only the console summary; the finally
        // block below still writes the full log and the NUnit XML report.
        if (CliArgsReader.GetArgumentValue("--skipsummarylog", args) != null)
        {
            return;
        }

        LogWriter.PrintResults(runners, beginTimeStamp, endTimeStamp, logWriter);
    }
    finally
    {
        // Always persist outputs, even on early return or exception.
        logWriter.WriteFullLog(launcherArgs.ResultFile);
        nunitReport.SaveResults(Path.Combine(customLogFolder, "pnunit-results.xml"));
    }
}
/// <summary>
/// Launcher entry point (no retry support): loads the configuration file
/// given as the only argument, runs each parallel test sequentially, and
/// prints per-group and overall summaries to the console.
/// </summary>
static void Main(string[] args)
{
    // Load the test configuration file
    if (args.Length == 0)
    {
        Console.WriteLine("Usage: launcher configfile");
        return;
    }

    string configfile = args[0];
    TestGroup group = TestConfLoader.LoadFromFile(configfile);
    if (group == null || group.ParallelTests.Length == 0)
    {
        Console.WriteLine("No tests to run");
        return;
    }

    ConfigureLogging();
    ConfigureRemoting();

    int testCount = group.ParallelTests.Length;
    Runner[] runners = new Runner[testCount];

    DateTime startedAt = DateTime.Now;
    // Each parallel test is launched sequencially...
    for (int idx = 0; idx < testCount; idx++)
    {
        Console.WriteLine("Test {0} of {1}", idx + 1, testCount);

        Runner runner = new Runner(group.ParallelTests[idx]);
        runner.Run();
        runners[idx] = runner;
        // Wait to finish
        runner.Join();
    }
    DateTime finishedAt = DateTime.Now;

    // Print the results
    double totalBiggestTime = 0;
    int totalTests = 0;
    int totalExecuted = 0;
    int totalFailed = 0;
    int totalPassed = 0;

    foreach (Runner runner in runners)
    {
        PNUnitTestResult[] results = runner.GetTestResults();
        Console.WriteLine("==== Tests Results for Parallel TestGroup {0} ===", runner.TestGroupName);

        int executed = 0;
        int failed = 0;
        int passed = 0;
        double biggestTime = 0;
        int ordinal = 0;

        foreach (PNUnitTestResult res in results)
        {
            if (res.Executed)
            {
                ++executed;
            }
            if (res.IsFailure)
            {
                ++failed;
            }
            if (res.IsSuccess)
            {
                ++passed;
            }

            PrintResult(++ordinal, res);

            if (res.Time > biggestTime)
            {
                biggestTime = res.Time;
            }
        }

        Console.WriteLine();
        Console.WriteLine("Summary:");
        // "% Success" is an integer division, as in the overall summary below.
        Console.WriteLine("\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            results.Length, executed, failed, passed,
            results.Length > 0 ? 100 * passed / results.Length : 0,
            biggestTime);

        totalTests += results.Length;
        totalExecuted += executed;
        totalFailed += failed;
        totalPassed += passed;
        totalBiggestTime += biggestTime;
    }

    // Only print the grand total when more than one group ran.
    if (runners.Length > 1)
    {
        Console.WriteLine();
        Console.WriteLine("Summary for all the parallel tests:");
        Console.WriteLine("\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            totalTests, totalExecuted, totalFailed, totalPassed,
            totalTests > 0 ? 100 * totalPassed / totalTests : 0,
            totalBiggestTime);
    }

    TimeSpan elapsedTime = finishedAt.Subtract(startedAt);
    Console.WriteLine("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds);
}
/// <summary>
/// Launcher entry point with retry, range, and single-test selection support.
/// Usage: launcher configfile [--result=filename] [--failed=filename]
///        [-D:var=value] [-val:variable=value] [--retry=number]
///        [--range=from-to] [--test=testname]
/// Runs tests [startTest..endTest] sequentially, retrying up to maxRetry,
/// persisting failed/passed groups after each test, then prints summaries.
/// </summary>
static void Main(string[] args)
{
    string resultfile = null;
    string failedfile = null;
    string passedfile = null;
    int retryOnFailure = 0;
    int maxRetry = MAX_TEST_RETRY;

    ConfigureLogging();

    try
    {
        // Load the test configuration file
        if (args.Length == 0)
        {
            Console.WriteLine(
                "Usage: launcher configfile [--result=filename] [--failed=filename] [-D:var=value] [-val:variable=value] [--retry=number] [--range=from-to] [--test=testname]");
            return;
        }

        string configfile = args[0];
        mTestPath = Path.GetDirectoryName(configfile);
        TestGroup group = TestConfLoader.LoadFromFile(configfile, args);

        int startTest = 0;
        int endTest = group.ParallelTests.Length - 1;

        // Default failed/passed files live next to the config file.
        failedfile = Path.Combine(mTestPath, "smokefailed.conf");
        passedfile = Path.Combine(mTestPath, "smokepassed.conf");

        if (args.Length > 1)
        {
            foreach (string arg in args)
            {
                if (arg.StartsWith("--result="))
                {
                    resultfile = arg.Substring(9);
                    resultfile = Path.GetFullPath(resultfile);
                }
                if (arg.StartsWith("--failed="))
                {
                    failedfile = arg.Substring(9);
                    failedfile = Path.GetFullPath(failedfile);
                }
                if (arg.StartsWith("--retry="))
                {
                    retryOnFailure = int.Parse(arg.Substring("--retry=".Length));
                    log.InfoFormat("Retry on failure activated. {0} retries", retryOnFailure);
                    maxRetry = retryOnFailure;
                }
                if (arg.StartsWith("--test="))
                {
                    // Narrow the run to a single named test.
                    string testName = arg.Substring("--test=".Length);
                    int index = -1;
                    for (int i = 0; i < group.ParallelTests.Length; i++)
                    {
                        if (group.ParallelTests[i].Name != testName)
                        {
                            continue;
                        }
                        index = i;
                        break;
                    }
                    if (index == -1)
                    {
                        Console.WriteLine("The specified test was not found");
                        return;
                    }
                    startTest = index;
                    endTest = index;
                }
                if (arg.StartsWith("--range="))
                {
                    string range = arg.Substring("--range=".Length);
                    // now range should be something like xx-xx
                    if (range.IndexOf("-") < 0)
                    {
                        Console.WriteLine("Test range incorrectly specified, it must be something like 0-10");
                        return;
                    }
                    string[] ranges = range.Split('-');
                    if (ranges.Length != 2)
                    {
                        Console.WriteLine("Test range incorrectly specified, it must be something like 0-10");
                        return;
                    }
                    startTest = int.Parse(ranges[0]);
                    endTest = int.Parse(ranges[1]);
                    if ((startTest > endTest) || (startTest < 0) || (startTest > group.ParallelTests.Length - 1))
                    {
                        Console.WriteLine("Start test must be in a correct test range");
                        return;
                    }
                    if ((endTest < startTest) || (endTest < 0) || (endTest > group.ParallelTests.Length - 1))
                    {
                        Console.WriteLine("End test must be in a correct test range");
                        return;
                    }
                    log.InfoFormat("Starting test range [{0}-{1}]", startTest, endTest);
                }
            }
        }

        if ((group == null) || (group.ParallelTests.Length == 0))
        {
            Console.WriteLine("No tests to run");
            return;
        }

        Hashtable userValues = GetUserValues(args);

        ConfigureRemoting();

        ArrayList failedGroups = new ArrayList();
        ArrayList passedGroups = new ArrayList();

        int testCount = endTest - startTest + 1;

        // Each parallel test is launched sequencially...
        Runner[] runners = new Runner[testCount];
        DateTime beginTimestamp = DateTime.Now;

        for (int i = startTest; i <= endTest;)
        {
            ParallelTest test = group.ParallelTests[i] as ParallelTest;
            int retryCount = 0;
            bool bRetry = true;
            while (bRetry && retryCount < maxRetry)
            {
                bRetry = false;
                if (testCount != group.ParallelTests.Length)
                {
                    log.InfoFormat("Test {0} of {1}. {2}/{3}", i, group.ParallelTests.Length, i - startTest + 1, testCount);
                }
                else
                {
                    log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Length);
                }

                Runner runner = new Runner(test, userValues);
                runner.Run();
                runners[i - startTest] = runner;
                // Wait to finish
                runner.Join();

                TestResult[] runnerResults = runner.GetTestResults();
                if (runnerResults == null)
                {
                    log.Info("Error. Results are NULL");
                    // BUGFIX: the old code did "++i; continue;" here; with
                    // bRetry already false the while exited and the "++i"
                    // at the bottom ran too, advancing i twice — skipping a
                    // test and leaving a null runners[] slot. Break and let
                    // the single ++i below advance to the next test.
                    break;
                }

                bRetry = RetryTest(runnerResults);
                bool bFailed = FailedTest(runnerResults);

                if (bRetry || ((bFailed && (retryOnFailure > 0) && ((retryCount + 1) < maxRetry)) /* so that list time is printed*/))
                {
                    bRetry = true;
                    ++retryCount;
                    log.Info("Test failed with retry option, trying again");
                    continue;
                }

                if (bFailed)
                {
                    failedGroups.Add(test);
                    WriteGroup(failedGroups, failedfile);
                }
                else
                {
                    passedGroups.Add(test);
                    WriteGroup(passedGroups, passedfile);
                }
            }
            // updated at the bottom so it's not affected by retries
            ++i;
        }

        DateTime endTimestamp = DateTime.Now;

        // Print the results
        double TotalBiggerTime = 0;
        int TotalTests = 0;
        int TotalExecutedTests = 0;
        int TotalFailedTests = 0;
        int TotalSuccessTests = 0;
        IList failedTests = new ArrayList();
        int j;

        foreach (Runner runner in runners)
        {
            int ExecutedTests = 0;
            int FailedTests = 0;
            int SuccessTests = 0;
            double BiggerTime = 0;

            TestResult[] results = runner.GetTestResults();
            // Guard: a runner can legitimately report null results (logged
            // above); skip it instead of throwing in the summary.
            if (results == null)
                continue;

            Log(string.Format("==== Tests Results for Parallel TestGroup {0} ===", runner.TestGroupName));
            j = 0;
            foreach (TestResult res in results)
            {
                if (res.Executed)
                {
                    ++ExecutedTests;
                }
                if (res.IsFailure)
                {
                    ++FailedTests;
                }
                if (res.IsSuccess)
                {
                    ++SuccessTests;
                }

                PrintResult(++j, res);

                if (res.Time > BiggerTime)
                {
                    BiggerTime = res.Time;
                }
                if (res.IsFailure)
                {
                    failedTests.Add(res);
                }
            }

            Log("Summary:");
            // NOTE: "% Success" is an integer division by design.
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                results.Length, ExecutedTests, FailedTests, SuccessTests,
                results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
                BiggerTime));

            TotalTests += results.Length;
            TotalExecutedTests += ExecutedTests;
            TotalFailedTests += FailedTests;
            TotalSuccessTests += SuccessTests;
            TotalBiggerTime += BiggerTime;
        }

        // print all failed tests together
        if (failedTests.Count > 0)
        {
            Log("==== Failed tests ===");
            for (j = 0; j < failedTests.Count; ++j)
            {
                PrintResult(j, failedTests[j] as PNUnitTestResult);
            }
        }

        if (runners.Length > 1)
        {
            Log("Summary for all the parallel tests:");
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                TotalTests, TotalExecutedTests, TotalFailedTests, TotalSuccessTests,
                TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
                TotalBiggerTime));
        }

        TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
        Log(string.Format("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds));
    }
    finally
    {
        // Always persist whatever results we have, even on early return/throw.
        WriteResult(resultfile);
    }
}