// Wraps the accumulated failed tests in a TestGroup and persists it so the
// failed set can be re-run from the generated configuration file.
private static void WriteFailedGroup(ArrayList failedTests, string filename)
{
    TestGroup failedGroup = new TestGroup();
    failedGroup.ParallelTests =
        (ParallelTest[])failedTests.ToArray(typeof(ParallelTest));

    TestConfLoader.WriteToFile(failedGroup, filename);
}
// Persists the given failed tests as a TestGroup configuration file so the
// failed subset can be replayed later.
void WriteGroup(List<ParallelTest> failedTests, string filename)
{
    TestGroup groupToWrite = new TestGroup();
    groupToWrite.ParallelTests = failedTests;

    TestConfLoader.WriteToFile(groupToWrite, filename);
}
// Resolves a parsed "--range=a-b" pair into a TestRange.
// The literal "LAST" as the upper bound maps to the final test index
// (group.ParallelTests.Count - 1), whatever the number of tests is.
static TestRange CalculateRange(string[] ranges, TestGroup group)
{
    int start = int.Parse(ranges[0]);

    int end = (ranges[1] == "LAST")
        ? group.ParallelTests.Count - 1
        : int.Parse(ranges[1]);

    return new TestRange(start, end);
}
// Applies a parsed "--range=a-b" pair to the static test-range fields via
// SetTestRange. The literal "LAST" as the upper bound maps to the final test
// index (group.ParallelTests.Count - 1), whatever the number of tests is.
private static void CalculateRange(string[] ranges, TestGroup group)
{
    int start = int.Parse(ranges[0]);

    int end = (ranges[1] == "LAST")
        ? group.ParallelTests.Count - 1
        : int.Parse(ranges[1]);

    SetTestRange(start, end);
}
/// <summary>
/// Serializes the given TestGroup as XML into <paramref name="file"/>,
/// overwriting any existing content.
/// </summary>
/// <param name="group">The test group to serialize.</param>
/// <param name="file">Destination file path; created or truncated.</param>
public static void WriteToFile(TestGroup group, string file)
{
    // 'using' disposes (and therefore closes) the stream deterministically
    // even if serialization throws — replaces the manual try/finally+Close()
    // pattern, matching the other WriteToFile variant in this file.
    using (FileStream writer =
        new FileStream(file, FileMode.Create, FileAccess.Write))
    {
        XmlSerializer ser = new XmlSerializer(typeof(TestGroup));
        ser.Serialize(writer, group);
    }
}
// Filters group.ParallelTests down to the tests whose name contains the
// pattern given in a "--pattern=" argument (case-insensitive substring).
static void LaunchAPattern(string arg, TestGroup group)
{
    string pattern = arg.Substring("--pattern=".Length).ToLower();

    // Iterate over a snapshot so removing from the live list is safe.
    List<ParallelTest> snapshot = new List<ParallelTest>(group.ParallelTests);

    foreach (ParallelTest candidate in snapshot)
    {
        bool matches = candidate.Name.ToLower().IndexOf(pattern) >= 0;
        if (!matches)
        {
            group.ParallelTests.Remove(candidate);
        }
    }
}
// Serializes the given TestGroup as XML into 'file', creating the containing
// directory first. Failures are logged (best-effort) rather than propagated,
// so a broken report path never aborts the test run.
public static void WriteToFile(TestGroup group, string file)
{
    try
    {
        EnsureDirectoryExists(file);

        using (FileStream stream =
            new FileStream(file, FileMode.Create, FileAccess.Write))
        {
            new XmlSerializer(typeof(TestGroup)).Serialize(stream, group);
        }
    }
    catch (Exception e)
    {
        mLog.ErrorFormat(
            "Unable to create file [{0}] to write failed tests! {1}{2}{3}",
            file, e.Message, Environment.NewLine, e.StackTrace);
    }
}
// Logs progress for test index 'i'. When only a sub-range of the group is
// being executed (testCount differs from the full group size) the message
// also shows position within the selected range.
void LogTestProgress(
    TestGroup group, TestRange testRange, int testCount, int i)
{
    int totalTests = group.ParallelTests.Count;

    if (testCount == totalTests)
    {
        mLog.InfoFormat("Test {0} of {1}", i + 1, totalTests);
        return;
    }

    mLog.InfoFormat(
        "Test {0} of {1}. {2}/{3}",
        i, totalTests, i - testRange.StartTest + 1, testCount);
}
// Validates the static mStartTest/mEndTest interval against the group size.
// Prints a console message and returns false when either bound is out of
// range; returns true when the interval is usable.
private static bool CheckValidInterval(TestGroup group)
{
    int lastIndex = group.ParallelTests.Count - 1;

    bool startValid =
        (mStartTest <= mEndTest) && (mStartTest >= 0) && (mStartTest <= lastIndex);
    if (!startValid)
    {
        Console.WriteLine("Start test must be in a correct test range");
        return false;
    }

    bool endValid =
        (mEndTest >= mStartTest) && (mEndTest >= 0) && (mEndTest <= lastIndex);
    if (!endValid)
    {
        Console.WriteLine("End test must be in a correct test range");
        return false;
    }

    return true;
}
// Validates a TestRange against the group size. Prints a console message and
// returns false when either bound is out of range; true when usable.
static bool CheckValidInterval(TestRange testRange, TestGroup group)
{
    int lastIndex = group.ParallelTests.Count - 1;

    bool startValid =
        (testRange.StartTest <= testRange.EndTest)
        && (testRange.StartTest >= 0)
        && (testRange.StartTest <= lastIndex);
    if (!startValid)
    {
        Console.WriteLine("Start test must be in a correct test range");
        return false;
    }

    bool endValid =
        (testRange.EndTest >= testRange.StartTest)
        && (testRange.EndTest >= 0)
        && (testRange.EndTest <= lastIndex);
    if (!endValid)
    {
        Console.WriteLine("End test must be in a correct test range");
        return false;
    }

    return true;
}
/// <summary>
/// Loads a TestGroup from an XML configuration file, then substitutes the
/// variables parsed from the command line into the loaded parallel tests.
/// </summary>
/// <param name="file">Path of the XML configuration file to read.</param>
/// <param name="args">Raw command-line arguments, scanned for variable
/// definitions via ParseVariablesFromCommandLine.</param>
/// <returns>The deserialized group with variables already replaced.</returns>
public static TestGroup LoadFromFile(string file, string[] args)
{
    // 'using' disposes (closes) the stream deterministically even if
    // deserialization or variable replacement throws — replaces the manual
    // try/finally+Close() pattern.
    using (FileStream reader =
        new FileStream(file, FileMode.Open, FileAccess.Read))
    {
        XmlSerializer ser = new XmlSerializer(typeof(TestGroup));
        TestGroup result = (TestGroup)ser.Deserialize(reader);

        Variable[] processedVars =
            ParseVariablesFromCommandLine(args, result.Variables);
        ReplaceVariables(result.ParallelTests, processedVars);

        return(result);
    }
}
// Parses a "--range=a-b" argument into a validated TestRange.
// Returns null when the text is malformed or the interval falls outside the
// group's tests; otherwise logs and returns the selected range.
static TestRange LaunchARange(string arg, TestGroup group)
{
    string rangeText = arg.Substring("--range=".Length);
    string[] limits = rangeText.Split('-');

    if (!CheckValidRangeValues(rangeText, limits))
        return null;

    TestRange range = CalculateRange(limits, group);

    if (!CheckValidInterval(range, group))
        return null;

    mLog.InfoFormat(
        "Starting test range [{0}-{1}]", range.StartTest, range.EndTest);

    return range;
}
// Resolves a "--test=name" argument to a single-test TestRange by exact name
// match. Returns null (after a console message) when no test matches.
static TestRange LaunchATest(string arg, TestGroup group)
{
    string testName = arg.Substring("--test=".Length);

    for (int idx = 0; idx < group.ParallelTests.Count; idx++)
    {
        if (group.ParallelTests[idx].Name == testName)
            return new TestRange(idx, idx);
    }

    Console.WriteLine("The specified test was not found");
    return null;
}
// Parses the launcher command line into a LauncherArgs instance.
// args[0] must be the configuration file path. Defaults are established
// first (test range = all tests; failed/result/error log files placed next
// to the configuration file), then each recognized flag overrides them:
// --result=, --failed=, --retry=, --max_barrier_wait_time=, --shell,
// --test=, --range=, --pattern=, --timeout, --testslist=, --usefilereport=.
// NOTE(review): LaunchATest/LaunchARange can return null on bad input, and a
// later --pattern= argument then dereferences result.TestRange — confirm
// intended flag-ordering assumptions with callers.
internal static LauncherArgs ProcessArgs(string[] args, TestGroup group)
{
    LauncherArgs result = new LauncherArgs();
    result.ConfigFile = args[0];
    string testPath = Path.GetDirectoryName(Path.GetFullPath(result.ConfigFile));
    result.TestRange = new TestRange(0, group.ParallelTests.Count - 1);
    result.FailedConfigFile = Path.Combine(testPath, "smokefailed.conf");
    result.ResultLogFile = Path.Combine(testPath, SMOKE_RESULT_FILE);
    result.ErrorLogFile = Path.Combine(testPath, SMOKE_ERRORS_FILE);

    // Only the config file was given: keep the defaults.
    if (args.Length <= 1)
    {
        return(result);
    }

    foreach (string arg in args)
    {
        if (arg.StartsWith("--result="))
        {
            result.ResultFile = Path.GetFullPath(arg.Substring(9));
            continue;
        }
        if (arg.StartsWith("--failed="))
        {
            result.FailedConfigFile = Path.GetFullPath(arg.Substring(9));
            continue;
        }
        if (arg.StartsWith("--retry="))
        {
            result.RetryOnFailure = int.Parse(arg.Substring("--retry=".Length));
            mLog.InfoFormat("Retry on failure activated. {0} retries", result.RetryOnFailure);
            result.MaxRetry = result.RetryOnFailure;
            continue;
        }
        if (arg.StartsWith("--max_barrier_wait_time="))
        {
            int maxBarrierTime = int.Parse(arg.Substring("--max_barrier_wait_time=".Length));
            mLog.InfoFormat("Max Barrier wait time set to: {0} seconds", maxBarrierTime);
            Barrier.SetMaxWaitTime(maxBarrierTime);
            continue;
        }
        if (arg.Equals("--shell"))
        {
            result.ShellMode = true;
            continue;
        }
        if (arg.StartsWith("--test="))
        {
            result.TestRange = LaunchATest(arg, group);
            continue;
        }
        if (arg.StartsWith("--range="))
        {
            result.TestRange = LaunchARange(arg, group);
            continue;
        }
        if (arg.StartsWith("--pattern="))
        {
            LaunchAPattern(arg, group);
            //update test range
            result.TestRange.SetTestRange(0, group.ParallelTests.Count - 1);
            continue;
        }
        // NOTE(review): the next two branches fall through (no 'continue');
        // harmless today because no later prefix can also match.
        if (arg.StartsWith("--timeout"))
        {
            result.TestsTimeout = SetTestTimeout(arg);
        }
        if (arg.StartsWith("--testslist="))
        {
            result.ListTestsFile = Path.GetFullPath(arg.Substring("--testslist=".Length));
        }
        if (arg.StartsWith("--usefilereport="))
        {
            result.UseFileReport =
                Path.GetFullPath(arg.Substring("--usefilereport=".Length));
            continue;
        }
    }

    return(result);
}
// Parses a "--range=a-b" argument and applies it to the static test-range
// fields. On malformed text or an out-of-bounds interval the range is reset
// instead; on success the selected range is logged.
private static void LaunchARange(string arg, TestGroup group)
{
    string rangeText = arg.Substring("--range=".Length);
    string[] parts = rangeText.Split('-');

    bool valuesOk = CheckValidRangeValues(rangeText, parts);
    if (!valuesOk)
    {
        ResetTestRange();
        return;
    }

    CalculateRange(parts, group);

    bool intervalOk = CheckValidInterval(group);
    if (!intervalOk)
    {
        ResetTestRange();
        return;
    }

    log.InfoFormat("Starting test range [{0}-{1}]", mStartTest, mEndTest);
}
// Persists the accumulated failed tests as a TestGroup configuration file so
// the failed subset can be re-run later.
private static void WriteGroup(List<ParallelTest> failedTests, string filename)
{
    TestGroup groupToWrite = new TestGroup();
    groupToWrite.ParallelTests = failedTests;

    TestConfLoader.WriteToFile(groupToWrite, filename);
}
// Entry point (legacy variant): loads the test configuration from args[0],
// runs every ParallelTest sequentially — retrying each up to MAX_TEST_RETRY
// times when RetryTest reports so — writes failed tests to a "failed"
// configuration file as they occur, and prints per-group plus overall result
// summaries. Optional flags: --result=filename, --failed=filename.
// The result file is always written (finally block), even on failure.
static void Main(string[] args)
{
    string resultfile = null;
    string failedfile = null;
    try
    {
        // Load the test configuration file
        if (args.Length == 0)
        {
            Console.WriteLine("Usage: launcher configfile [--result=filename] [--failed=filename]");
            return;
        }
        string configfile = args[0];
        mTestPath = Path.GetDirectoryName(configfile);
        TestGroup group = TestConfLoader.LoadFromFile(configfile);
        failedfile = Path.Combine(mTestPath, "smokefailed.conf");
        if (args.Length > 1)
        {
            foreach (string arg in args)
            {
                if (arg.StartsWith("--result="))
                {
                    resultfile = arg.Substring(9);
                    resultfile = Path.GetFullPath(resultfile);
                }
                if (arg.StartsWith("--failed="))
                {
                    failedfile = arg.Substring(9);
                    failedfile = Path.GetFullPath(failedfile);
                }
            }
        }
        if ((group == null) || (group.ParallelTests.Length == 0))
        {
            Console.WriteLine("No tests to run");
            return;
        }
        ConfigureLogging();
        ConfigureRemoting();
        ArrayList failedGroups = new ArrayList();
        // Each parallel test is launched sequencially...
        Runner[] runners = new Runner[group.ParallelTests.Length];
        int i = 0;
        DateTime beginTimestamp = DateTime.Now;
        foreach (ParallelTest test in group.ParallelTests)
        {
            int retryCount = 0;
            bool bRetry = true;
            while (bRetry && retryCount < MAX_TEST_RETRY)
            {
                bRetry = false;
                log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Length);
                Runner runner = new Runner(test);
                runner.Run();
                runners[i] = runner;
                // Wait to finish
                runner.Join();
                TestResult[] runnerResults = runner.GetTestResults();
                if (runnerResults == null)
                {
                    // NOTE(review): ++i here plus the ++i after the while loop
                    // advances the counter twice for a null-result test —
                    // confirm this double increment is intended.
                    log.Info("Error. Results are NULL");
                    ++i;
                    continue;
                }
                if (RetryTest(runnerResults))
                {
                    bRetry = true;
                    ++retryCount;
                    log.Info("Test failed with retry option, trying again");
                    continue;
                }
                if (FailedTest(runnerResults))
                {
                    WriteFailed(runnerResults);
                    failedGroups.Add(test);
                    WriteFailedGroup(failedGroups, failedfile);
                }
            }
            ++i;
        }
        DateTime endTimestamp = DateTime.Now;
        // Print the results
        double TotalBiggerTime = 0;
        int TotalTests = 0;
        int TotalExecutedTests = 0;
        int TotalFailedTests = 0;
        int TotalSuccessTests = 0;
        IList failedTests = new ArrayList();
        foreach (Runner runner in runners)
        {
            int ExecutedTests = 0;
            int FailedTests = 0;
            int SuccessTests = 0;
            double BiggerTime = 0;
            TestResult[] results = runner.GetTestResults();
            Log(string.Format("==== Tests Results for Parallel TestGroup {0} ===", runner.TestGroupName));
            i = 0;
            foreach (TestResult res in results)
            {
                if (res.Executed)
                {
                    ++ExecutedTests;
                }
                if (res.IsFailure)
                {
                    ++FailedTests;
                }
                if (res.IsSuccess)
                {
                    ++SuccessTests;
                }
                PrintResult(++i, res);
                if (res.Time > BiggerTime)
                {
                    BiggerTime = res.Time;
                }
                if (res.IsFailure)
                {
                    failedTests.Add(res);
                }
            }
            Log("Summary:");
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                results.Length, ExecutedTests, FailedTests, SuccessTests,
                results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
                BiggerTime));
            TotalTests += results.Length;
            TotalExecutedTests += ExecutedTests;
            TotalFailedTests += FailedTests;
            TotalSuccessTests += SuccessTests;
            TotalBiggerTime += BiggerTime;
        }
        // print all failed tests together
        if (failedTests.Count > 0)
        {
            Log("==== Failed tests ===");
            for (i = 0; i < failedTests.Count; ++i)
            {
                PrintResult(i, failedTests[i] as PNUnitTestResult);
            }
        }
        if (runners.Length > 1)
        {
            Log("Summary for all the parallel tests:");
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                TotalTests, TotalExecutedTests, TotalFailedTests, TotalSuccessTests,
                TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
                TotalBiggerTime));
        }
        TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
        Log(string.Format("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds));
    }
    finally
    {
        WriteResult(resultfile);
    }
}
// Loads the test group from args[0], processes the CLI flags into
// LauncherArgs, configures remoting, runs the selected tests through
// Launcher.RunTests, and always flushes the textual log plus the
// NUnit-format results ("pnunit-results.xml" under customLogFolder) in the
// finally block — even when the run aborts.
// NOTE(review): group is passed to ProcessArgs before the null check below,
// so a null group would throw inside ProcessArgs first — confirm LoadFromFile
// can actually return null here.
static void RunLauncher(string customLogFolder, string[] args)
{
    TestGroup group = TestConfLoader.LoadFromFile(args[0], args);
    LauncherArgs launcherArgs = CliArgsReader.ProcessArgs(args, group);
    if ((group == null) || (group.ParallelTests.Count == 0))
    {
        Console.WriteLine("No tests to run");
        return;
    }
    TestSuiteLoggerParams loggerParams = CliArgsReader.ProcessTestSuiteLoggerArgs(args);
    NUnitResultCollector nunitReport = new NUnitResultCollector();
    LogWriter logWriter = new LogWriter(launcherArgs.ResultLogFile, launcherArgs.ErrorLogFile);
    try
    {
        string portValue = CliArgsReader.GetArgumentValue("--port=", args);
        int port = portValue == null ? DEFAULT_LAUNCHER_PORT : int.Parse(portValue);
        string ipToBind = CliArgsReader.GetArgumentValue("--iptobind=", args);
        Configurator.ConfigureRemoting(port, ipToBind ?? string.Empty);
        DateTime beginTimeStamp = DateTime.Now;
        TestSuiteLogger testSuiteLogger = null;
        if (loggerParams.IsInitialized())
        {
            testSuiteLogger = new TestSuiteLogger(loggerParams);
            testSuiteLogger.SaveBuild();
            testSuiteLogger.CreateSuite();
        }
        Hashtable userValues = CliArgsReader.GetUserValues(args);
        Launcher launcher = new Launcher();
        string listenAddress = string.Format("{0}:{1}", ipToBind ?? Environment.MachineName, port);
        List<string> testList = string.IsNullOrEmpty(launcherArgs.ListTestsFile) ?
            null : LoadTestsToRunFromFile(launcherArgs.ListTestsFile);
        Runner[] runners = launcher.RunTests(
            group,
            testList,
            launcherArgs.MaxRetry,
            launcherArgs.ShellMode,
            launcherArgs.RetryOnFailure,
            launcherArgs.FailedConfigFile,
            testSuiteLogger,
            launcherArgs.TestsTimeout,
            launcherArgs.TestRange,
            userValues,
            logWriter,
            listenAddress,
            launcherArgs.UseFileReport);
        DateTime endTimeStamp = DateTime.Now;
        FillNunitReport(nunitReport, runners);
        // --skipsummarylog suppresses the console summary; the finally block
        // below still writes the full log and the NUnit results file.
        if (CliArgsReader.GetArgumentValue("--skipsummarylog", args) != null)
        {
            return;
        }
        LogWriter.PrintResults(
            runners, beginTimeStamp, endTimeStamp, logWriter);
    }
    finally
    {
        logWriter.WriteFullLog(launcherArgs.ResultFile);
        nunitReport.SaveResults(Path.Combine(customLogFolder, "pnunit-results.xml"));
    }
}
// Entry point (simplest variant): loads the configuration from args[0],
// launches every ParallelTest sequentially with no retry support, then
// prints per-group and overall summaries to the console.
static void Main(string[] args)
{
    // Load the test configuration file
    if (args.Length == 0)
    {
        Console.WriteLine("Usage: launcher configfile");
        return;
    }
    string configfile = args[0];
    TestGroup group = TestConfLoader.LoadFromFile(configfile);
    if ((group == null) || (group.ParallelTests.Length == 0))
    {
        Console.WriteLine("No tests to run");
        return;
    }
    ConfigureLogging();
    ConfigureRemoting();
    // Each parallel test is launched sequencially...
    Runner[] runners = new Runner[group.ParallelTests.Length];
    int i = 0;
    DateTime beginTimestamp = DateTime.Now;
    foreach (ParallelTest test in group.ParallelTests)
    {
        Console.WriteLine("Test {0} of {1}", i + 1, group.ParallelTests.Length);
        Runner runner = new Runner(test);
        runner.Run();
        runners[i++] = runner;
        // Wait to finish
        runner.Join();
    }
    DateTime endTimestamp = DateTime.Now;
    // Print the results
    double TotalBiggerTime = 0;
    int TotalTests = 0;
    int TotalExecutedTests = 0;
    int TotalFailedTests = 0;
    int TotalSuccessTests = 0;
    foreach (Runner runner in runners)
    {
        int ExecutedTests = 0;
        int FailedTests = 0;
        int SuccessTests = 0;
        double BiggerTime = 0;
        PNUnitTestResult[] results = runner.GetTestResults();
        Console.WriteLine("==== Tests Results for Parallel TestGroup {0} ===", runner.TestGroupName);
        i = 0;
        foreach (PNUnitTestResult res in results)
        {
            if (res.Executed)
            {
                ++ExecutedTests;
            }
            if (res.IsFailure)
            {
                ++FailedTests;
            }
            if (res.IsSuccess)
            {
                ++SuccessTests;
            }
            PrintResult(++i, res);
            if (res.Time > BiggerTime)
            {
                BiggerTime = res.Time;
            }
        }
        Console.WriteLine();
        Console.WriteLine("Summary:");
        Console.WriteLine("\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            results.Length, ExecutedTests, FailedTests, SuccessTests,
            results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
            BiggerTime);
        TotalTests += results.Length;
        TotalExecutedTests += ExecutedTests;
        TotalFailedTests += FailedTests;
        TotalSuccessTests += SuccessTests;
        TotalBiggerTime += BiggerTime;
    }
    if (runners.Length > 1)
    {
        Console.WriteLine();
        Console.WriteLine("Summary for all the parallel tests:");
        Console.WriteLine("\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            TotalTests, TotalExecutedTests, TotalFailedTests, TotalSuccessTests,
            TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
            TotalBiggerTime);
    }
    TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
    Console.WriteLine("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds);
}
// Runs the tests between the static mStartTest and mEndTest indices
// sequentially, retrying each failed test up to maxRetry times when
// retryOnFailure is enabled, and persisting the accumulated failed tests to
// failedfile after every failure. Returns one Runner per slot of the range.
// NOTE(review): when GetTestResults() returns null, 'i' is incremented both
// inside the retry loop and again after it, skipping a slot — confirm this
// double increment is intended.
private static Runner[] RunTests(string[] args, TestGroup group, int maxRetry,
    bool bShellMode, int retryOnFailure, string failedfile)
{
    int testCount = mEndTest - mStartTest + 1;
    Runner[] runners = new Runner[testCount];
    List<ParallelTest> failedGroups = new List<ParallelTest>();
    Hashtable userValues = GetUserValues(args);
    for (int i = mStartTest; i <= mEndTest; )
    {
        ParallelTest test = group.ParallelTests[i] as ParallelTest;
        int retryCount = 0;
        bool bRetry = true;
        while (bRetry && retryCount < maxRetry)
        {
            bRetry = false;
            // Show range-relative progress only when a sub-range is running.
            if (testCount != group.ParallelTests.Count)
                log.InfoFormat("Test {0} of {1}. {2}/{3}", i, group.ParallelTests.Count, i - mStartTest + 1, testCount);
            else
                log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Count);
            Runner runner = new Runner(test, userValues);
            if (bShellMode)
                runner.ShellMode = bShellMode;
            runner.Run();
            runners[i - mStartTest] = runner;
            // Wait to finish
            runner.Join();
            TestResult[] runnerResults = runner.GetTestResults();
            if (runnerResults == null)
            {
                log.Info("Error. Results are NULL");
                ++i;
                continue;
            }
            bRetry = RetryTest(runnerResults);
            bool bFailed = FailedTest(runnerResults);
            if (bRetry || ((bFailed && (retryOnFailure > 0) &&
                ((retryCount + 1) < maxRetry)) /* so that list time is printed*/))
            {
                bRetry = true;
                ++retryCount;
                log.Info("Test failed with retry option, trying again");
                continue;
            }
            if (bFailed)
            {
                failedGroups.Add(test);
                WriteGroup(failedGroups, failedfile);
            }
        }
        // updated at the bottom so it's not affected by retries
        ++i;
    }
    return runners;
}
// Runs the tests inside 'testRange' sequentially: skips tests not contained
// in 'testsList' (when given), retries failed tests up to maxRetry times
// when retryOnFailure is enabled, logs results either to the TTS suite
// logger or to reportFile, records failed groups to failedfile, and keeps
// mStatus (and the optional status report file) updated throughout.
// Returns one Runner per slot of the range; an empty array when testRange is
// null (nothing selected).
internal Runner[] RunTests(
    TestGroup group,
    List<string> testsList,
    int maxRetry,
    bool bShellMode,
    int retryOnFailure,
    string failedfile,
    TestSuiteLogger testSuiteLogger,
    int testsTimeout,
    TestRange testRange,
    Hashtable userValues,
    LogWriter logWriter,
    string listenAddress,
    string reportFile)
{
    if (testRange == null)
    {
        mLog.Warn("No tests are selected to run. Exiting.");
        return(new Runner[0]);
    }
    int testCount = testRange.EndTest - testRange.StartTest + 1;
    int testToExecuteCount = (testsList != null) ? testsList.Count : testCount;
    mStatus = new LauncherStatus(testCount, testToExecuteCount);
    Runner[] runners = new Runner[testCount];
    List<ParallelTest> failedGroups = new List<ParallelTest>();
    for (int currentTest = testRange.StartTest; currentTest <= testRange.EndTest;)
    {
        int ini = Environment.TickCount;
        ParallelTest test = group.ParallelTests[currentTest] as ParallelTest;
        // Honor the explicit tests list: skip tests not selected.
        if (!IsSelectedTest(test, testsList))
        {
            mLog.ErrorFormat(
                "Test with name [{0}] is not invited to this party.",
                test.Name);
            ++currentTest;
            mStatus.Increment();
            continue;
        }
        mStatus.SetCurrentTestName(test.Name);
        int retryCount = 0;
        bool bRetry = true;
        while (bRetry && retryCount < maxRetry)
        {
            bRetry = false;
            LogTestProgress(group, testRange, testCount, currentTest);
            Runner runner = new Runner(
                test, userValues, testsTimeout, logWriter, mStatus, listenAddress);
            if (bShellMode)
            {
                runner.ShellMode = bShellMode;
            }
            runners[currentTest - testRange.StartTest] = runner;
            if (reportFile != null)
            {
                StatusReport.Write(reportFile, mStatus, false);
            }
            runner.Run();
            TestResult[] runnerResults = runner.GetTestResults();
            if (runnerResults == null)
            {
                // NOTE(review): currentTest is advanced here AND after the
                // while loop for a null-result test — confirm intended.
                mLog.InfoFormat("Error. Results for test [{0}] are NULL", test.Name);
                ++currentTest;
                mStatus.Increment();
                mStatus.IncrementExecuted();
                continue;
            }
            bool isRepeated = retryCount > 0;
            if (reportFile == null)
            {
                LogTestResultsToTTS(
                    testSuiteLogger, runnerResults, test.Name, isRepeated);
            }
            else
            {
                LogTestResultsToFile(
                    reportFile, runnerResults, test.Name, isRepeated, true);
            }
            bRetry = RetryTest(runnerResults);
            bool bFailed = FailedTest(runnerResults);
            if (bRetry || ((bFailed && (retryOnFailure > 0) &&
                ((retryCount + 1) < maxRetry)) /* so that list time is printed*/))
            {
                bRetry = true;
                ++retryCount;
                mLog.Info("Test failed with retry option, trying again");
                mStatus.AddRepeated(test.Name);
                continue;
            }
            if (bFailed)
            {
                failedGroups.Add(test);
                WriteGroup(failedGroups, failedfile);
                mStatus.AddFailed(test.Name);
            }
            if (IgnoredTest(runnerResults))
            {
                mStatus.AddIgnored(test.Name);
            }
        }
        // updated at the bottom so it's not affected by retries
        mStatus.Increment();
        mStatus.IncrementExecuted();
        ++currentTest;
        mLog.DebugFormat("Test {0} time {1} ms", test.Name, Environment.TickCount - ini);
    }
    if (reportFile != null)
    {
        StatusReport.Write(reportFile, mStatus, true);
    }
    return(runners);
}
// Filters group.ParallelTests down to the tests whose name contains the
// pattern given in a "--pattern=" argument (case-insensitive substring).
private static void LaunchAPattern(string arg, TestGroup group)
{
    string pattern = arg.Substring("--pattern=".Length).ToLower();

    // Walk backwards so removals do not shift indices still to be visited.
    for (int i = group.ParallelTests.Count - 1; i >= 0; i--)
    {
        if (group.ParallelTests[i].Name.ToLower().IndexOf(pattern) < 0)
        {
            group.ParallelTests.RemoveAt(i);
        }
    }
}
// Resolves a "--test=name" argument by exact name match and narrows the
// static test range to that single test. When no test matches, a console
// message is printed and the range is reset instead.
private static void LaunchATest(string arg, TestGroup group)
{
    string testName = arg.Substring("--test=".Length);

    for (int i = 0; i < group.ParallelTests.Count; i++)
    {
        if (group.ParallelTests[i].Name == testName)
        {
            SetTestRange(i, i);
            return;
        }
    }

    Console.WriteLine("The specified test was not found");
    ResetTestRange();
}
// Wraps the accumulated failed tests in a TestGroup and persists it so the
// failed set can be re-run from the generated configuration file.
private static void WriteGroup(ArrayList failedTests, string filename)
{
    TestGroup groupToWrite = new TestGroup();
    groupToWrite.ParallelTests =
        (ParallelTest[])failedTests.ToArray(typeof(ParallelTest));

    TestConfLoader.WriteToFile(groupToWrite, filename);
}
// Entry point (full-featured variant): loads the configuration from
// args[0], applies command-line flags (--result=, --failed=, --retry=,
// --test=, --range=, plus -D:/-val: variables consumed by LoadFromFile /
// GetUserValues), runs the selected range of ParallelTests sequentially with
// retry support, writes failed tests to smokefailed.conf and passed tests to
// smokepassed.conf as the run progresses, and prints per-group and overall
// summaries. The result file is always written (finally block).
static void Main(string[] args)
{
    string resultfile = null;
    string failedfile = null;
    string passedfile = null;
    int retryOnFailure = 0;
    int maxRetry = MAX_TEST_RETRY;
    ConfigureLogging();
    try
    {
        // Load the test configuration file
        if (args.Length == 0)
        {
            Console.WriteLine(
                "Usage: launcher configfile [--result=filename] [--failed=filename] [-D:var=value] [-val:variable=value] [--retry=number] [--range=from-to] [--test=testname]");
            return;
        }
        string configfile = args[0];
        mTestPath = Path.GetDirectoryName(configfile);
        TestGroup group = TestConfLoader.LoadFromFile(configfile, args);
        int startTest = 0;
        int endTest = group.ParallelTests.Length - 1;
        failedfile = Path.Combine(mTestPath, "smokefailed.conf");
        passedfile = Path.Combine(mTestPath, "smokepassed.conf");
        if (args.Length > 1)
        {
            foreach (string arg in args)
            {
                if (arg.StartsWith("--result="))
                {
                    resultfile = arg.Substring(9);
                    resultfile = Path.GetFullPath(resultfile);
                }
                if (arg.StartsWith("--failed="))
                {
                    failedfile = arg.Substring(9);
                    failedfile = Path.GetFullPath(failedfile);
                }
                if (arg.StartsWith("--retry="))
                {
                    retryOnFailure = int.Parse(arg.Substring("--retry=".Length));
                    log.InfoFormat("Retry on failure activated. {0} retries", retryOnFailure);
                    maxRetry = retryOnFailure;
                }
                if (arg.StartsWith("--test="))
                {
                    // Narrow the run to a single test selected by exact name.
                    string testName = arg.Substring("--test=".Length);
                    int index = -1;
                    for (int i = 0; i < group.ParallelTests.Length; i++)
                    {
                        if (group.ParallelTests[i].Name != testName)
                        {
                            continue;
                        }
                        index = i;
                        break;
                    }
                    if (index == -1)
                    {
                        Console.WriteLine("The specified test was not found");
                        return;
                    }
                    startTest = index;
                    endTest = index;
                }
                if (arg.StartsWith("--range="))
                {
                    string range = arg.Substring("--range=".Length);
                    // now range should be something like xx-xx
                    if (range.IndexOf("-") < 0)
                    {
                        Console.WriteLine("Test range incorrectly specified, it must be something like 0-10");
                        return;
                    }
                    string[] ranges = range.Split('-');
                    if (ranges.Length != 2)
                    {
                        Console.WriteLine("Test range incorrectly specified, it must be something like 0-10");
                        return;
                    }
                    startTest = int.Parse(ranges[0]);
                    endTest = int.Parse(ranges[1]);
                    if ((startTest > endTest) || (startTest < 0) || (startTest > group.ParallelTests.Length - 1))
                    {
                        Console.WriteLine("Start test must be in a correct test range");
                        return;
                    }
                    if ((endTest < startTest) || (endTest < 0) || (endTest > group.ParallelTests.Length - 1))
                    {
                        Console.WriteLine("End test must be in a correct test range");
                        return;
                    }
                    log.InfoFormat("Starting test range [{0}-{1}]", startTest, endTest);
                }
            }
        }
        if ((group == null) || (group.ParallelTests.Length == 0))
        {
            Console.WriteLine("No tests to run");
            return;
        }
        Hashtable userValues = GetUserValues(args);
        ConfigureRemoting();
        ArrayList failedGroups = new ArrayList();
        ArrayList passedGroups = new ArrayList();
        int testCount = endTest - startTest + 1;
        // Each parallel test is launched sequencially...
        Runner[] runners = new Runner[testCount];
        DateTime beginTimestamp = DateTime.Now;
        for (int i = startTest; i <= endTest;)
        {
            ParallelTest test = group.ParallelTests[i] as ParallelTest;
            int retryCount = 0;
            bool bRetry = true;
            while (bRetry && retryCount < maxRetry)
            {
                bRetry = false;
                // Show range-relative progress only when running a sub-range.
                if (testCount != group.ParallelTests.Length)
                {
                    log.InfoFormat("Test {0} of {1}. {2}/{3}", i, group.ParallelTests.Length, i - startTest + 1, testCount);
                }
                else
                {
                    log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Length);
                }
                Runner runner = new Runner(test, userValues);
                runner.Run();
                runners[i - startTest] = runner;
                // Wait to finish
                runner.Join();
                TestResult[] runnerResults = runner.GetTestResults();
                if (runnerResults == null)
                {
                    // NOTE(review): ++i here plus the ++i after the while loop
                    // skips a slot for a null-result test — confirm intended.
                    log.Info("Error. Results are NULL");
                    ++i;
                    continue;
                }
                bRetry = RetryTest(runnerResults);
                bool bFailed = FailedTest(runnerResults);
                if (bRetry || ((bFailed && (retryOnFailure > 0) &&
                    ((retryCount + 1) < maxRetry)) /* so that list time is printed*/))
                {
                    bRetry = true;
                    ++retryCount;
                    log.Info("Test failed with retry option, trying again");
                    continue;
                }
                if (bFailed)
                {
                    failedGroups.Add(test);
                    WriteGroup(failedGroups, failedfile);
                }
                else
                {
                    passedGroups.Add(test);
                    WriteGroup(passedGroups, passedfile);
                }
            }
            // updated at the bottom so it's not affected by retries
            ++i;
        }
        DateTime endTimestamp = DateTime.Now;
        // Print the results
        double TotalBiggerTime = 0;
        int TotalTests = 0;
        int TotalExecutedTests = 0;
        int TotalFailedTests = 0;
        int TotalSuccessTests = 0;
        IList failedTests = new ArrayList();
        int j;
        foreach (Runner runner in runners)
        {
            int ExecutedTests = 0;
            int FailedTests = 0;
            int SuccessTests = 0;
            double BiggerTime = 0;
            TestResult[] results = runner.GetTestResults();
            Log(string.Format("==== Tests Results for Parallel TestGroup {0} ===", runner.TestGroupName));
            j = 0;
            foreach (TestResult res in results)
            {
                if (res.Executed)
                {
                    ++ExecutedTests;
                }
                if (res.IsFailure)
                {
                    ++FailedTests;
                }
                if (res.IsSuccess)
                {
                    ++SuccessTests;
                }
                PrintResult(++j, res);
                if (res.Time > BiggerTime)
                {
                    BiggerTime = res.Time;
                }
                if (res.IsFailure)
                {
                    failedTests.Add(res);
                }
            }
            Log("Summary:");
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                results.Length, ExecutedTests, FailedTests, SuccessTests,
                results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
                BiggerTime));
            TotalTests += results.Length;
            TotalExecutedTests += ExecutedTests;
            TotalFailedTests += FailedTests;
            TotalSuccessTests += SuccessTests;
            TotalBiggerTime += BiggerTime;
        }
        // print all failed tests together
        if (failedTests.Count > 0)
        {
            Log("==== Failed tests ===");
            for (j = 0; j < failedTests.Count; ++j)
            {
                PrintResult(j, failedTests[j] as PNUnitTestResult);
            }
        }
        if (runners.Length > 1)
        {
            Log("Summary for all the parallel tests:");
            Log(string.Format("\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                TotalTests, TotalExecutedTests, TotalFailedTests, TotalSuccessTests,
                TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
                TotalBiggerTime));
        }
        TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
        Log(string.Format("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds));
    }
    finally
    {
        WriteResult(resultfile);
    }
}