static void Main(string[] args)
{
    string resultfile = null;
    string failedfile = null;
    string passedfile = null;
    int retryOnFailure = 0;
    int maxRetry = MAX_TEST_RETRY;

    ConfigureLogging();

    try
    {
        // Load the test configuration file
        if( args.Length == 0 )
        {
            Console.WriteLine(
                "Usage: launcher configfile [--result=filename] [--failed=filename] [-D:var=value] [-val:variable=value] [--retry=number] [--range=from-to] [--test=testname]");
            return;
        }

        string configfile = args[0];

        mTestPath = Path.GetDirectoryName(configfile);

        TestGroup group = TestConfLoader.LoadFromFile(configfile, args);

        int startTest = 0;
        int endTest = group.ParallelTests.Length - 1;

        failedfile = Path.Combine(mTestPath, "smokefailed.conf");
        passedfile = Path.Combine(mTestPath, "smokepassed.conf");

        if( args.Length > 1 )
        {
            foreach( string arg in args )
            {
                if( arg.StartsWith("--result=") )
                {
                    resultfile = arg.Substring(9);
                    resultfile = Path.GetFullPath(resultfile);
                }

                if( arg.StartsWith("--failed=") )
                {
                    failedfile = arg.Substring(9);
                    failedfile = Path.GetFullPath(failedfile);
                }

                if( arg.StartsWith("--retry=") )
                {
                    retryOnFailure = int.Parse(arg.Substring("--retry=".Length));
                    log.InfoFormat("Retry on failure activated. {0} retries", retryOnFailure);
                    maxRetry = retryOnFailure;
                }

                if( arg.StartsWith("--test=") )
                {
                    string testName = arg.Substring("--test=".Length);

                    int index = -1;
                    for( int i = 0; i < group.ParallelTests.Length; i++ )
                    {
                        if( group.ParallelTests[i].Name != testName )
                            continue;

                        index = i;
                        break;
                    }

                    if( index == -1 )
                    {
                        Console.WriteLine("The specified test was not found");
                        return;
                    }

                    startTest = index;
                    endTest = index;
                }

                if( arg.StartsWith("--range=") )
                {
                    string range = arg.Substring("--range=".Length);

                    // now range should be something like xx-xx
                    if( range.IndexOf("-") < 0 )
                    {
                        Console.WriteLine("Test range incorrectly specified, it must be something like 0-10");
                        return;
                    }

                    string[] ranges = range.Split('-');
                    if( ranges.Length != 2 )
                    {
                        Console.WriteLine("Test range incorrectly specified, it must be something like 0-10");
                        return;
                    }

                    startTest = int.Parse(ranges[0]);
                    endTest = int.Parse(ranges[1]);

                    if( (startTest > endTest) || (startTest < 0) || (startTest > group.ParallelTests.Length - 1) )
                    {
                        Console.WriteLine("Start test must be in a correct test range");
                        return;
                    }

                    if( (endTest < startTest) || (endTest < 0) || (endTest > group.ParallelTests.Length - 1) )
                    {
                        Console.WriteLine("End test must be in a correct test range");
                        return;
                    }

                    log.InfoFormat("Starting test range [{0}-{1}]", startTest, endTest);
                }
            }
        }

        if( (group == null) || (group.ParallelTests.Length == 0) )
        {
            Console.WriteLine("No tests to run");
            return;
        }

        Hashtable userValues = GetUserValues(args);

        ConfigureRemoting();

        ArrayList failedGroups = new ArrayList();
        ArrayList passedGroups = new ArrayList();

        int testCount = endTest - startTest + 1;

        // Each parallel test is launched sequentially...
        Runner[] runners = new Runner[testCount];

        DateTime beginTimestamp = DateTime.Now;

        for( int i = startTest; i <= endTest; )
        {
            ParallelTest test = group.ParallelTests[i] as ParallelTest;

            int retryCount = 0;
            bool bRetry = true;
            while( bRetry && retryCount < maxRetry )
            {
                bRetry = false;

                if( testCount != group.ParallelTests.Length )
                    log.InfoFormat("Test {0} of {1}. {2}/{3}",
                        i, group.ParallelTests.Length, i - startTest + 1, testCount);
                else
                    log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Length);

                Runner runner = new Runner(test, userValues);
                runner.Run();
                runners[i - startTest] = runner;

                // Wait to finish
                runner.Join();

                TestResult[] runnerResults = runner.GetTestResults();
                if( runnerResults == null )
                {
                    log.Info("Error. Results are NULL");
                    ++i;
                    continue;
                }

                bRetry = RetryTest(runnerResults);

                bool bFailed = FailedTest(runnerResults);

                if( bRetry || ((bFailed && (retryOnFailure > 0) && ((retryCount + 1) < maxRetry)) /* so that last time is printed */) )
                {
                    bRetry = true;
                    ++retryCount;
                    log.Info("Test failed with retry option, trying again");
                    continue;
                }

                if( bFailed )
                {
                    failedGroups.Add(test);
                    WriteGroup(failedGroups, failedfile);
                }
                else
                {
                    passedGroups.Add(test);
                    WriteGroup(passedGroups, passedfile);
                }
            }

            // updated at the bottom so it's not affected by retries
            ++i;
        }

        DateTime endTimestamp = DateTime.Now;

        // Print the results
        double TotalBiggerTime = 0;

        int TotalTests = 0;
        int TotalExecutedTests = 0;
        int TotalFailedTests = 0;
        int TotalSuccessTests = 0;

        IList failedTests = new ArrayList();

        int j;
        foreach( Runner runner in runners )
        {
            int ExecutedTests = 0;
            int FailedTests = 0;
            int SuccessTests = 0;
            double BiggerTime = 0;

            TestResult[] results = runner.GetTestResults();

            Log(string.Format("==== Tests Results for Parallel TestGroup {0} ===",
                runner.TestGroupName));

            j = 0;
            foreach( TestResult res in results )
            {
                if( res.Executed )
                    ++ExecutedTests;
                if( res.IsFailure )
                    ++FailedTests;
                if( res.IsSuccess )
                    ++SuccessTests;

                PrintResult(++j, res);

                if( res.Time > BiggerTime )
                    BiggerTime = res.Time;

                if( res.IsFailure )
                    failedTests.Add(res);
            }

            Log("Summary:");
            Log(string.Format(
                "\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                results.Length,
                ExecutedTests,
                FailedTests,
                SuccessTests,
                results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
                BiggerTime));

            TotalTests += results.Length;
            TotalExecutedTests += ExecutedTests;
            TotalFailedTests += FailedTests;
            TotalSuccessTests += SuccessTests;
            TotalBiggerTime += BiggerTime;
        }

        // print all failed tests together
        if( failedTests.Count > 0 )
        {
            Log("==== Failed tests ===");
            for( j = 0; j < failedTests.Count; ++j )
                PrintResult(j, failedTests[j] as PNUnitTestResult);
        }

        if( runners.Length > 1 )
        {
            Log("Summary for all the parallel tests:");
            Log(string.Format(
                "\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                TotalTests,
                TotalExecutedTests,
                TotalFailedTests,
                TotalSuccessTests,
                TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
                TotalBiggerTime));
        }

        TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
        Log(string.Format("Launcher execution time: {0} seconds",
            elapsedTime.TotalSeconds));
    }
    finally
    {
        WriteResult(resultfile);
    }
}
static void Main(string[] args)
{
    // Load the test configuration file
    if( args.Length == 0 )
    {
        Console.WriteLine("Usage: launcher configfile");
        return;
    }

    string configfile = args[0];

    TestGroup group = TestConfLoader.LoadFromFile(configfile);

    if( (group == null) || (group.ParallelTests.Length == 0) )
    {
        Console.WriteLine("No tests to run");
        return;
    }

    ConfigureLogging();
    ConfigureRemoting();

    // Each parallel test is launched sequentially...
    Runner[] runners = new Runner[group.ParallelTests.Length];

    int i = 0;
    DateTime beginTimestamp = DateTime.Now;

    foreach( ParallelTest test in group.ParallelTests )
    {
        Console.WriteLine("Test {0} of {1}", i + 1, group.ParallelTests.Length);

        Runner runner = new Runner(test);
        runner.Run();
        runners[i++] = runner;

        // Wait to finish
        runner.Join();
    }

    DateTime endTimestamp = DateTime.Now;

    // Print the results
    double TotalBiggerTime = 0;

    int TotalTests = 0;
    int TotalExecutedTests = 0;
    int TotalFailedTests = 0;
    int TotalSuccessTests = 0;

    foreach( Runner runner in runners )
    {
        int ExecutedTests = 0;
        int FailedTests = 0;
        int SuccessTests = 0;
        double BiggerTime = 0;

        PNUnitTestResult[] results = runner.GetTestResults();

        Console.WriteLine("==== Tests Results for Parallel TestGroup {0} ===",
            runner.TestGroupName);

        i = 0;
        foreach( PNUnitTestResult res in results )
        {
            if( res.Executed )
                ++ExecutedTests;
            if( res.IsFailure )
                ++FailedTests;
            if( res.IsSuccess )
                ++SuccessTests;

            PrintResult(++i, res);

            if( res.Time > BiggerTime )
                BiggerTime = res.Time;
        }

        Console.WriteLine();
        Console.WriteLine("Summary:");
        Console.WriteLine(
            "\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            results.Length,
            ExecutedTests,
            FailedTests,
            SuccessTests,
            results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
            BiggerTime);

        TotalTests += results.Length;
        TotalExecutedTests += ExecutedTests;
        TotalFailedTests += FailedTests;
        TotalSuccessTests += SuccessTests;
        TotalBiggerTime += BiggerTime;
    }

    if( runners.Length > 1 )
    {
        Console.WriteLine();
        Console.WriteLine("Summary for all the parallel tests:");
        Console.WriteLine(
            "\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            TotalTests,
            TotalExecutedTests,
            TotalFailedTests,
            TotalSuccessTests,
            TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
            TotalBiggerTime);
    }

    TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
    Console.WriteLine("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds);
}
static void Main(string[] args)
{
    string resultfile = null;
    string failedfile = null;

    try
    {
        // Load the test configuration file
        if( args.Length == 0 )
        {
            Console.WriteLine("Usage: launcher configfile [--result=filename] [--failed=filename]");
            return;
        }

        string configfile = args[0];

        mTestPath = Path.GetDirectoryName(configfile);

        TestGroup group = TestConfLoader.LoadFromFile(configfile);

        failedfile = Path.Combine(mTestPath, "smokefailed.conf");

        if( args.Length > 1 )
        {
            foreach( string arg in args )
            {
                if( arg.StartsWith("--result=") )
                {
                    resultfile = arg.Substring(9);
                    resultfile = Path.GetFullPath(resultfile);
                }

                if( arg.StartsWith("--failed=") )
                {
                    failedfile = arg.Substring(9);
                    failedfile = Path.GetFullPath(failedfile);
                }
            }
        }

        if( (group == null) || (group.ParallelTests.Length == 0) )
        {
            Console.WriteLine("No tests to run");
            return;
        }

        ConfigureLogging();
        ConfigureRemoting();

        ArrayList failedGroups = new ArrayList();

        // Each parallel test is launched sequentially...
        Runner[] runners = new Runner[group.ParallelTests.Length];

        int i = 0;
        DateTime beginTimestamp = DateTime.Now;

        foreach( ParallelTest test in group.ParallelTests )
        {
            int retryCount = 0;
            bool bRetry = true;
            while( bRetry && retryCount < MAX_TEST_RETRY )
            {
                bRetry = false;

                log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Length);

                Runner runner = new Runner(test);
                runner.Run();
                runners[i] = runner;

                // Wait to finish
                runner.Join();

                TestResult[] runnerResults = runner.GetTestResults();
                if( runnerResults == null )
                {
                    log.Info("Error. Results are NULL");
                    ++i;
                    continue;
                }

                if( RetryTest(runnerResults) )
                {
                    bRetry = true;
                    ++retryCount;
                    log.Info("Test failed with retry option, trying again");
                    continue;
                }

                if( FailedTest(runnerResults) )
                {
                    WriteFailed(runnerResults);
                    failedGroups.Add(test);
                    WriteFailedGroup(failedGroups, failedfile);
                }
            }

            ++i;
        }

        DateTime endTimestamp = DateTime.Now;

        // Print the results
        double TotalBiggerTime = 0;

        int TotalTests = 0;
        int TotalExecutedTests = 0;
        int TotalFailedTests = 0;
        int TotalSuccessTests = 0;

        IList failedTests = new ArrayList();

        foreach( Runner runner in runners )
        {
            int ExecutedTests = 0;
            int FailedTests = 0;
            int SuccessTests = 0;
            double BiggerTime = 0;

            TestResult[] results = runner.GetTestResults();

            Log(string.Format("==== Tests Results for Parallel TestGroup {0} ===",
                runner.TestGroupName));

            i = 0;
            foreach( TestResult res in results )
            {
                if( res.Executed )
                    ++ExecutedTests;
                if( res.IsFailure )
                    ++FailedTests;
                if( res.IsSuccess )
                    ++SuccessTests;

                PrintResult(++i, res);

                if( res.Time > BiggerTime )
                    BiggerTime = res.Time;

                if( res.IsFailure )
                    failedTests.Add(res);
            }

            Log("Summary:");
            Log(string.Format(
                "\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                results.Length,
                ExecutedTests,
                FailedTests,
                SuccessTests,
                results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
                BiggerTime));

            TotalTests += results.Length;
            TotalExecutedTests += ExecutedTests;
            TotalFailedTests += FailedTests;
            TotalSuccessTests += SuccessTests;
            TotalBiggerTime += BiggerTime;
        }

        // print all failed tests together
        if( failedTests.Count > 0 )
        {
            Log("==== Failed tests ===");
            for( i = 0; i < failedTests.Count; ++i )
                PrintResult(i, failedTests[i] as PNUnitTestResult);
        }

        if( runners.Length > 1 )
        {
            Log("Summary for all the parallel tests:");
            Log(string.Format(
                "\tTotal: {0}\r\n\tExecuted: {1}\r\n\tFailed: {2}\r\n\tSuccess: {3}\r\n\t% Success: {4}\r\n\tBiggest Execution Time: {5} s\r\n",
                TotalTests,
                TotalExecutedTests,
                TotalFailedTests,
                TotalSuccessTests,
                TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
                TotalBiggerTime));
        }

        TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
        Log(string.Format("Launcher execution time: {0} seconds",
            elapsedTime.TotalSeconds));
    }
    finally
    {
        WriteResult(resultfile);
    }
}
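The retry loop above relies on FailedTest and RetryTest helpers that are not shown in this listing. A plausible sketch of FailedTest, assuming a test group counts as failed when any of its results reports a failure (the actual implementation may differ):

    static bool FailedTest(TestResult[] results)
    {
        // Hypothetical helper: a single failing result marks the whole group as failed.
        foreach (TestResult res in results)
        {
            if (res.IsFailure)
                return true;
        }
        return false;
    }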
private static Runner[] RunTests(
    string[] args, TestGroup group, int maxRetry,
    bool bShellMode, int retryOnFailure, string failedfile)
{
    int testCount = mEndTest - mStartTest + 1;

    Runner[] runners = new Runner[testCount];

    List<ParallelTest> failedGroups = new List<ParallelTest>();

    Hashtable userValues = GetUserValues(args);

    for (int i = mStartTest; i <= mEndTest; )
    {
        ParallelTest test = group.ParallelTests[i] as ParallelTest;

        int retryCount = 0;
        bool bRetry = true;
        while (bRetry && retryCount < maxRetry)
        {
            bRetry = false;

            if (testCount != group.ParallelTests.Count)
                log.InfoFormat("Test {0} of {1}. {2}/{3}",
                    i, group.ParallelTests.Count, i - mStartTest + 1, testCount);
            else
                log.InfoFormat("Test {0} of {1}", i + 1, group.ParallelTests.Count);

            Runner runner = new Runner(test, userValues);

            if (bShellMode)
                runner.ShellMode = bShellMode;

            runner.Run();
            runners[i - mStartTest] = runner;

            // Wait to finish
            runner.Join();

            TestResult[] runnerResults = runner.GetTestResults();
            if (runnerResults == null)
            {
                log.Info("Error. Results are NULL");
                ++i;
                continue;
            }

            bRetry = RetryTest(runnerResults);

            bool bFailed = FailedTest(runnerResults);

            if (bRetry || ((bFailed && (retryOnFailure > 0) && ((retryCount + 1) < maxRetry)) /* so that last time is printed */))
            {
                bRetry = true;
                ++retryCount;
                log.Info("Test failed with retry option, trying again");
                continue;
            }

            if (bFailed)
            {
                failedGroups.Add(test);
                WriteGroup(failedGroups, failedfile);
            }
        }

        // updated at the bottom so it's not affected by retries
        ++i;
    }

    return runners;
}
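A sketch of how RunTests might be driven from Main together with PrintResults below. The NUnitResultCollector construction and the surrounding variable names are assumptions made for illustration, not taken from the listing:

    // Hypothetical call site: run the selected test range and report on it.
    DateTime beginTimestamp = DateTime.Now;
    Runner[] runners = RunTests(args, group, maxRetry, bShellMode, retryOnFailure, failedfile);
    DateTime endTimestamp = DateTime.Now;

    NUnitResultCollector nunitReport = new NUnitResultCollector();   // assumed parameterless constructor
    PrintResults(runners, beginTimestamp, endTimestamp, nunitReport);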
private static void PrintResults(
    Runner[] runners,
    DateTime beginTimeStamp, DateTime endTimeStamp,
    NUnitResultCollector nunitReport)
{
    double TotalBiggerTime = 0;

    int TotalTests = 0;
    int TotalExecutedTests = 0;
    int TotalIgnoredTests = 0;
    int TotalFailedTests = 0;
    int TotalSuccessTests = 0;

    IList failedTests = new ArrayList();

    int j;
    foreach (Runner runner in runners)
    {
        int ExecutedTests = 0;
        int FailedTests = 0;
        int SuccessTests = 0;
        int IgnoredTests = 0;
        double BiggerTime = 0;

        TestResult[] results = runner.GetTestResults();

        Log(string.Format("==== Tests Results for Parallel TestGroup {0} ===",
            runner.TestGroupName));

        j = 0;

        nunitReport.AddResults(results);

        foreach (TestResult res in results)
        {
            if (!res.Executed)
            {
                ++IgnoredTests;
                continue;
            }

            if (res.Executed)
                ++ExecutedTests;
            if (res.IsFailure)
                ++FailedTests;
            if (res.IsSuccess)
                ++SuccessTests;

            PrintResult(++j, res);

            if (res.Time > BiggerTime)
                BiggerTime = res.Time;

            if (res.IsFailure)
                failedTests.Add(res);
        }

        Log("Summary:");
        Log(string.Format(
            "\tTotal: {0}\r\n" +
            "\tExecuted: {1}\r\n" +
            "\tIgnored: {2}\r\n" +
            "\tFailed: {3}\r\n" +
            "\tSuccess: {4}\r\n" +
            "\t% Success: {5}\r\n" +
            "\tBiggest Execution Time: {6} s\r\n",
            results.Length,
            ExecutedTests,
            IgnoredTests,
            FailedTests,
            SuccessTests,
            results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
            BiggerTime));

        TotalTests += results.Length;
        TotalExecutedTests += ExecutedTests;
        TotalIgnoredTests += IgnoredTests;
        TotalFailedTests += FailedTests;
        TotalSuccessTests += SuccessTests;
        TotalBiggerTime += BiggerTime;
    }

    // print all failed tests together
    if (failedTests.Count > 0)
    {
        Log("==== Failed tests ===");
        for (j = 0; j < failedTests.Count; ++j)
            PrintResult(j, failedTests[j] as PNUnitTestResult);
    }

    if (runners.Length > 1)
    {
        Log("Summary for all the parallel tests:");
        Log(string.Format(
            "\tTotal: {0}\r\n" +
            "\tExecuted: {1}\r\n" +
            "\tIgnored: {2}\r\n" +
            "\tFailed: {3}\r\n" +
            "\tSuccess: {4}\r\n" +
            "\t% Success: {5}\r\n" +
            "\tBiggest Execution Time: {6} s\r\n",
            TotalTests,
            TotalExecutedTests,
            TotalIgnoredTests,
            TotalFailedTests,
            TotalSuccessTests,
            TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
            TotalBiggerTime));
    }

    TimeSpan elapsedTime = endTimeStamp.Subtract(beginTimeStamp);
    Log(string.Format("Launcher execution time: {0} seconds",
        elapsedTime.TotalSeconds));
}
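Note that the "% Success" figures computed above use integer division, so fractional percentages are truncated. A worked example:

    int successTests = 2;
    int totalTests = 3;
    int percentSuccess = 100 * successTests / totalTests;   // 66, not 66.7: integer division truncates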
static void Main(string[] args)
{
    // Load the test configuration file
    if (args.Length == 0)
    {
        Console.WriteLine("Usage: launcher configfile");
        return;
    }

    string configfile = args[0];

    TestGroup group = TestConfLoader.LoadFromFile(configfile);

    if ((group == null) || (group.ParallelTests.Length == 0))
    {
        Console.WriteLine("No tests to run");
        return;
    }

    ConfigureLogging();
    ConfigureRemoting();

    // Each parallel test is launched sequentially...
    Runner[] runners = new Runner[group.ParallelTests.Length];

    int i = 0;
    DateTime beginTimestamp = DateTime.Now;

    foreach (ParallelTest test in group.ParallelTests)
    {
        Console.WriteLine("Test {0} of {1}", i + 1, group.ParallelTests.Length);

        Runner runner = new Runner(test);
        runner.Run();
        runners[i++] = runner;

        // Wait to finish
        runner.Join();
    }

    DateTime endTimestamp = DateTime.Now;

    // Print the results
    double TotalBiggerTime = 0;

    int TotalTests = 0;
    int TotalExecutedTests = 0;
    int TotalFailedTests = 0;
    int TotalSuccessTests = 0;

    foreach (Runner runner in runners)
    {
        int ExecutedTests = 0;
        int FailedTests = 0;
        int SuccessTests = 0;
        double BiggerTime = 0;

        PNUnitTestResult[] results = runner.GetTestResults();

        Console.WriteLine("==== Tests Results for Parallel TestGroup {0} ===",
            runner.TestGroupName);

        i = 0;
        foreach (PNUnitTestResult res in results)
        {
            if (res.Executed)
            {
                ++ExecutedTests;
            }
            if (res.IsFailure)
            {
                ++FailedTests;
            }
            if (res.IsSuccess)
            {
                ++SuccessTests;
            }

            PrintResult(++i, res);

            if (res.Time > BiggerTime)
            {
                BiggerTime = res.Time;
            }
        }

        Console.WriteLine();
        Console.WriteLine("Summary:");
        Console.WriteLine(
            "\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            results.Length,
            ExecutedTests,
            FailedTests,
            SuccessTests,
            results.Length > 0 ? 100 * SuccessTests / results.Length : 0,
            BiggerTime);

        TotalTests += results.Length;
        TotalExecutedTests += ExecutedTests;
        TotalFailedTests += FailedTests;
        TotalSuccessTests += SuccessTests;
        TotalBiggerTime += BiggerTime;
    }

    if (runners.Length > 1)
    {
        Console.WriteLine();
        Console.WriteLine("Summary for all the parallel tests:");
        Console.WriteLine(
            "\tTotal: {0}\n\tExecuted: {1}\n\tFailed: {2}\n\tSuccess: {3}\n\t% Success: {4}\n\tBiggest Execution Time: {5} s\n",
            TotalTests,
            TotalExecutedTests,
            TotalFailedTests,
            TotalSuccessTests,
            TotalTests > 0 ? 100 * TotalSuccessTests / TotalTests : 0,
            TotalBiggerTime);
    }

    TimeSpan elapsedTime = endTimestamp.Subtract(beginTimestamp);
    Console.WriteLine("Launcher execution time: {0} seconds", elapsedTime.TotalSeconds);
}
public void NotifyResult(string testName, PNUnitTestResult result)
{
    mLog.DebugFormat("NotifyResult called for TestGroup {0}, Test {1}",
        mTestGroup.Name, testName);

    int count = 0;

    mLog.DebugFormat(
        "NotifyResult lock entered for TestGroup {0}, Test {1}",
        mTestGroup.Name, testName);

    mTestsRun.AddExecutedTest(testName);
    mTestsRun.AddTestResult(result);

    count = mTestsRun.TestsResultsCount;

    lock (mBarriers)
    {
        if (mBarriersOfTests.ContainsKey(testName))
        {
            mLog.DebugFormat("Going to abandon barriers of test {0}", testName);

            IList list = (IList)mBarriersOfTests[testName];
            foreach (string barrier in list)
            {
                mLog.DebugFormat("Abandoning barrier {0}", barrier);
                mBarriers[barrier].Abandon();
            }
        }
    }

    mLog.DebugFormat(
        "NotifyResult finishing for TestGroup {0}, Test {1}.",
        mTestGroup.Name, testName);

    string machine = Runner.GetTestConfFromName(mTestGroup, testName).Machine;

    string resultText = result.IsSuccess ? "PASS" : "FAIL";
    if (!result.Executed)
    {
        resultText = "IGNORED";
    }

    string message = string.Format(
        "Result for TestGroup {0}, Test {1}: {2}. Time {3} ms. {4}/{5} tests finished. Agent: {6}",
        mTestGroup.Name, testName, resultText,
        Environment.TickCount - mInitialTime,
        count, mTestsRun.LaunchedCount, machine);

    string logTestName = string.Format("{0}.{1}", mTestGroup.Name, testName);

    if (!result.Executed)
    {
        mLogWriter.LogWarn(message);
        mLogWriter.WriteTestLog(logTestName, result, machine);
        return;
    }

    if (result.IsSuccess)
    {
        mLogWriter.Log(message);
        mLogWriter.WriteTestLog(logTestName, result, machine);
    }
    else
    {
        mLogWriter.LogError(message);
        mLogWriter.WriteFailedTestLog(logTestName, result, machine);
    }

    if (mTestsRun.AllTestsFinished())
    {
        mLog.DebugFormat(
            "All the tests notified the results, waking up. mResults.Count == {0}",
            mTestsRun.TestsResultsCount);
        mFinish.Set();
    }
}
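NotifyResult wakes up the launcher once mTestsRun.AllTestsFinished() returns true. A minimal sketch of what that check might look like, assuming the tracker simply compares the collected result count against the number of launched tests (hypothetical; the real tracker may do more bookkeeping):

    public bool AllTestsFinished()
    {
        // Assumed: every launched test eventually reports exactly one result.
        return TestsResultsCount >= LaunchedCount;
    }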