public void Merge(TestRunResults results) { lock (_padLock) { removeChanged(results); mergeTestList(_failed, results.Assembly, results.Project, results.Failed, results.Passed); mergeTestList(_ignored, results.Assembly, results.Project, results.Ignored, results.Passed); } }
public void Merge(TestRunResults results) { lock (_padLock) { Debug.WriteDebug("Merging test run results"); removeChanged(results); mergeTestList(_failed, results.Assembly, results.Project, results.Failed, results.Passed); mergeTestList(_ignored, results.Assembly, results.Project, results.Ignored, results.Passed); } }
public void Should_remove_any_item_with_runner_type_any() { var results = new TestResult[] { new TestResult(TestRunner.Any, TestRunStatus.Failed, "Test name", "Message", new IStackLine[] {}) }; var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Passed, "Test name", "", new IStackLine[] { }) }); _runResultCache.Merge(runResults); _runResultCache.Failed.Length.ShouldEqual(0); }
public void Should_remove_cached_ignored_tests_that_now_passes() { var results = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] {}), new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Another test", "Message", new IStackLine[] {}) }; var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Passed, "Test name", "", new IStackLine[] { }) }); _runResultCache.Merge(runResults); _runResultCache.Ignored.Length.ShouldEqual(1); }
private static void handleSuite(XmlNodeList suites, List<TestRunResults> results, string testLocation) { foreach (XmlNode suite in suites) { handleSuite(suite.SelectNodes("testsuite"), results, testLocation); var tests = getTests(suite); if (tests.Count() > 0) { var result = new TestRunResults( suite.Attributes["name"].Value, testLocation, false, TestRunner.PhpUnit, tests.ToArray()); result.SetTimeSpent(TimeSpan.FromMilliseconds(1000*double.Parse(suite.Attributes["time"].Value))); results.Add(result); } } }
/// <summary> /// Set the test run results based on the parallel runner exit code. /// </summary> /// <param name="runResults"></param> /// <param name="exitCode"></param> /// <param name="failureReason"></param> /// <param name="errorReason"></param> private void RunResultsFromParallelRunnerExitCode(TestRunResults runResults, int exitCode, string failureReason, ref string errorReason) { // set the status of the build based on the exit code switch (exitCode) { case (int)ParallelRunResult.Pass: runResults.TestState = TestState.Passed; break; case (int)ParallelRunResult.Warning: runResults.TestState = TestState.Warning; break; case (int)ParallelRunResult.Fail: runResults.ErrorDesc = "ParallelRunner test has FAILED!"; runResults.TestState = TestState.Failed; break; case (int)ParallelRunResult.Canceled: runResults.ErrorDesc = "ParallelRunner was stopped since job has timed out!"; ConsoleWriter.WriteErrLine(runResults.ErrorDesc); runResults.TestState = TestState.Error; break; case (int)ParallelRunResult.Error: errorReason = failureReason; runResults.ErrorDesc = errorReason; ConsoleWriter.WriteErrLine(runResults.ErrorDesc); runResults.TestState = TestState.Error; break; case (int)ParallelRunResult.NotStarted: runResults.ErrorDesc = "Failed to start ParallelRunner!"; ConsoleWriter.WriteErrLine(runResults.ErrorDesc); runResults.TestState = TestState.Error; break; default: ConsoleWriter.WriteErrLine(errorReason); runResults.ErrorDesc = errorReason; runResults.TestState = TestState.Error; break; } }
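The switch above assumes a ParallelRunResult enum whose members match the handled cases. A minimal sketch is shown below; the member names are taken from the snippet, but the underlying integer values, which have to line up with the exit codes the ParallelRunner process reports, are not shown in the source and are left implicit here.

// Hedged sketch: member names come from the switch above; the actual numeric values must match
// the ParallelRunner process exit codes and are an assumption not taken from the snippet.
public enum ParallelRunResult
{
    Pass,
    Warning,
    Fail,
    Canceled,
    Error,
    NotStarted
}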
public void Should_remove_tests_for_run_infos_having_run_all_tests() { var results = new TestRunResults("project1", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test1") }); var cache = new RunResultCache(); cache.Merge(results); var runInfo = new TestRunInfo(new Project("project1", new ProjectDocument(ProjectType.CSharp)), "assembly"); var locator = new RemovedTestsLocator(cache); var output = locator.RemoveUnmatchedRunInfoTests(new TestRunResults[] {}, new TestRunInfo[] { runInfo }); output.Count.ShouldEqual(1); output[0].Passed.Length.ShouldEqual(1); output[0].Passed[0].Name.ShouldEqual("Test1"); }
private void moveTestsBetweenStates(TestRunResults results, TestResult[] newSstate, Dictionary <int, TestItem> oldState) { foreach (var test in newSstate) { var item = new TestItem(results.Assembly, results.Project, test); var changed = oldState .Where(x => x.Value.IsTheSameTestAs(item)) .Select(x => x.Key) .ToArray(); if (changed.Length > 0) { //logTest("Removing test that changed state ", item); foreach (var changedItem in changed) { oldState.Remove(changedItem); } } } }
public void Should_run_tests() { _project.Value.SetOutputPath(""); _project.Value.SetAssemblyName("someProject.dll"); var info = new TestRunInfo(_project, "someProject.dll"); _listGenerator.Stub(l => l.Generate(null)).IgnoreArguments().Return(new string[] { "some file.csproj" }); _configuration.Stub(c => c.BuildExecutable(_project.Value)).Return("invalid_to_not_run_builds.exe"); _testRunner.Stub(t => t.CanHandleTestFor(info.Assembly)).IgnoreArguments().Return(true); var result = new TestRunResults[] { new TestRunResults("", "", false, TestRunner.NUnit, new TestResult[] { }) }; _testRunner.Stub(t => t.RunTests(new TestRunInfo[] { info }, null, null)).IgnoreArguments() .Return(result); _removedTestLocator.Stub(r => r.SetRemovedTestsAsPassed(null, null)).IgnoreArguments().Return(result[0]); var message = new ProjectChangeMessage(); message.AddFile(new ChangedFile("some file.csproj")); _consumer.Consume(message); _testRunner.AssertWasCalled(t => t.RunTests(new TestRunInfo[] { new TestRunInfo(null, "") }, null, null), t => t.IgnoreArguments()); }
public TestRunResults GetTestRunResults(Guid testRunId) { if (!_testRunExecutions.ContainsKey(testRunId)) { throw new TestRunNotFoundException($"Test run with id {testRunId} was not found."); } TestRunExecutionDetails runDetails = _testRunExecutions[testRunId]; ITestRunExecutor executor = runDetails.Executor; TestRunResults results = new TestRunResults { TestOutput = executor.StandardOutput, ErrorOutput = executor.ErrorOutput, RunStatus = MapRunStatus(executor.Status), TotalRuntime = executor.ElapsedTime, TestRunId = testRunId, TestSuiteName = executor.TestSuiteName }; return(results); }
public void Should_find_test_delta_since_last_pop() { _runResultCache.EnabledDeltas(); var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Passed, "Passing test name", "Message", new IStackLine[] { }), new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Some failing test", "Message", new IStackLine[] { }), new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Message 1", new IStackLine[] { }) }); _runResultCache.Merge(runResults); var deltas = _runResultCache.PopDeltas(); deltas.AddedTests.Length.ShouldEqual(2); runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Passed, "Passing test name", "Message", new IStackLine[] { }), new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Failing test that will pass", "Message", new IStackLine[] { }), new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message 2", new IStackLine[] { }) }); _runResultCache.Merge(runResults); runResults = new TestRunResults("project", "assembly", true, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Passed, "Failing test that will pass", "Message", new IStackLine[] { }), new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Message 3", new IStackLine[] { }) }); _runResultCache.Merge(runResults); deltas = _runResultCache.PopDeltas(); deltas.AddedTests.Length.ShouldEqual(1); deltas.AddedTests[0].Value.Name.ShouldEqual("Test name"); deltas.AddedTests[0].Value.Message.ShouldEqual("Message 3"); deltas.AddedTests[0].Value.Status.ShouldEqual(TestRunStatus.Failed); deltas.RemovedTests.Length.ShouldEqual(1); deltas.RemovedTests[0].Value.Name.ShouldEqual("Test name"); deltas.RemovedTests[0].Value.Status.ShouldEqual(TestRunStatus.Failed); }
public void Consume(AssemblyChangeMessage message) { _isRunning = true; var runReport = new RunReport(); try { informParticipants(message); var runInfos = getRunInfos(message); var preProcessed = preProcess(runInfos); preProcessed = new PreProcessedTesRuns(preProcessed.ProcessWrapper, new TestRunInfoMerger(preProcessed.RunInfos).MergeWith(_abortedTestRuns).ToArray()); foreach (var runner in _testRunners) { runTest(runner, preProcessed, runReport); if (_exit) { _abortedTestRuns.Clear(); _abortedTestRuns.AddRange(preProcessed.RunInfos); break; } } } catch (Exception ex) { var result = new TestRunResults("", "", false, TestRunner.Any, new TestResult[] { new TestResult(TestRunner.Any, TestRunStatus.Failed, "AutoTest.Net internal error", ex.ToString()) }); _bus.Publish(new TestRunMessage(result)); } if (_exit) { runReport.WasAborted(); } _bus.Publish(new RunFinishedMessage(runReport)); if (!_exit) { _abortedTestRuns.Clear(); } _exit = false; _isRunning = false; }
private TestRunResults[] getResults(IEnumerable <AutoTest.TestRunners.Shared.Results.TestResult> tests, TestRunInfo[] runInfos) { var results = new List <TestRunResults>(); foreach (var byRunner in tests.GroupBy(x => x.Runner)) { var runner = TestRunnerConverter.FromString(byRunner.Key); foreach (var byAssembly in byRunner.GroupBy(x => x.Assembly)) { var info = runInfos.Where(x => x.Assembly.Equals(byAssembly.Key)).FirstOrDefault(); var project = ""; var partial = false; if (info != null) { if (info.Project != null) { project = info.Project.Key; } partial = info.OnlyRunSpcifiedTestsFor(runner) || info.GetTestsFor(runner).Count() > 0 || info.GetMembersFor(runner).Count() > 0 || info.GetNamespacesFor(runner).Count() > 0; } DebugLog.Debug.WriteDetail(string.Format("Partial run is {0} for runner {1}", partial, runner)); var result = new TestRunResults( project, byAssembly.Key, partial, runner, byAssembly.Select(x => ConvertResult(x)).ToArray()); result.SetTimeSpent(TimeSpan.FromMilliseconds(byAssembly.Sum(x => x.DurationInMilliseconds))); results.Add(result); } } return(results.ToArray()); }
public void Should_serialize_test_run_message() { var testResults = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Passed, "Test name", "message", new IStackLine[] { new StackLineMessage("method name", "file", 13) }, 34) }; var results = new TestRunResults("project 1", "assembly", false, TestRunner.NUnit, testResults); results.SetTimeSpent(new TimeSpan(12345)); var message = new TestRunMessage(results); var output = serializeDeserialize <TestRunMessage>(message); output.Results.Project.ShouldEqual("project 1"); output.Results.Assembly.ShouldEqual("assembly"); output.Results.IsPartialTestRun.ShouldBeFalse(); output.Results.TimeSpent.ShouldEqual(new TimeSpan(12345)); output.Results.All.Length.ShouldEqual(1); output.Results.All[0].Runner.ShouldEqual(TestRunner.NUnit); output.Results.All[0].Status.ShouldEqual(TestRunStatus.Passed); output.Results.All[0].Name.ShouldEqual("Test name"); output.Results.All[0].Message.ShouldEqual("message"); output.Results.All[0].StackTrace[0].Method.ShouldEqual("method name"); output.Results.All[0].StackTrace[0].File.ShouldEqual("file"); output.Results.All[0].StackTrace[0].LineNumber.ShouldEqual(13); output.Results.All[0].TimeSpent.TotalMilliseconds.ShouldEqual(34); }
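The test above depends on a serializeDeserialize&lt;T&gt; helper that round-trips a message through a serializer. A minimal sketch is given below; the choice of BinaryFormatter over a MemoryStream is an assumption about the helper, not something stated in the snippet.

// Hedged sketch of the round-trip helper used above; BinaryFormatter and MemoryStream are assumptions.
// Requires: using System.IO; using System.Runtime.Serialization.Formatters.Binary;
private T serializeDeserialize<T>(T message)
{
    var formatter = new BinaryFormatter();
    using (var stream = new MemoryStream())
    {
        formatter.Serialize(stream, message);    // write the message graph to the stream
        stream.Position = 0;                     // rewind before reading it back
        return (T)formatter.Deserialize(stream); // and deserialize a fresh copy
    }
}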
public void Should_not_merge_same_ignored_tests_from_different_assemblies() { var results = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] { }) }; var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); results = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] { }) }; runResults = new TestRunResults("project", "another assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); _runResultCache.Ignored.Length.ShouldEqual(2); }
void IPreProcessTestruns.RunFinished(TestRunResults[] results) { }
public void Merge(TestRunResults results) { HasMergedTestResults = true; }
private void generateAnalysisReport(TestRunResults runDesc) { string lrrLocation = Path.Combine(runDesc.ReportLocation, LRR_FOLDER, LRR_FOLDER + ".lrr"); string lraLocation = Path.Combine(runDesc.ReportLocation, LRA_FOLDER, LRA_FOLDER + ".lra"); string htmlLocation = Path.Combine(runDesc.ReportLocation, HTML_FOLDER, HTML_FOLDER + ".html"); ProcessStartInfo analysisRunner = new ProcessStartInfo(); analysisRunner.FileName = ANALYSIS_LAUNCHER; analysisRunner.Arguments = "\""+lrrLocation + "\" \"" + lraLocation + "\" \"" + htmlLocation+"\""; analysisRunner.UseShellExecute = false; analysisRunner.RedirectStandardOutput = true; ConsoleWriter.WriteLine("executing Analysis launcher with arguments : "+analysisRunner.Arguments); ConsoleWriter.WriteLine("time for analysis: " + _perScenarioTimeOutMinutes.ToString(@"dd\:\:hh\:mm\:ss")); analysisRunner.RedirectStandardOutput = true; analysisRunner.RedirectStandardError = true; Process runner = Process.Start(analysisRunner); if (runner != null) { runner.OutputDataReceived += runner_OutputDataReceived; runner.ErrorDataReceived += runner_ErrorDataReceived; runner.BeginOutputReadLine(); runner.BeginErrorReadLine(); Stopwatch analysisStopWatch = Stopwatch.StartNew(); while (!runner.WaitForExit(_pollingInterval * 1000) && analysisStopWatch.Elapsed < _perScenarioTimeOutMinutes) ; analysisStopWatch.Stop(); runner.CancelOutputRead(); runner.CancelErrorRead(); ConsoleWriter.WriteLine("time passed: " + analysisStopWatch.Elapsed.ToString(@"dd\:\:hh\:mm\:ss")); if (analysisStopWatch.Elapsed > _perScenarioTimeOutMinutes) { runDesc.ErrorDesc = Resources.LrAnalysisTimeOut; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; if (!runner.HasExited) { runner.Kill(); } } //ConsoleWriter.WriteLine("checking error code"); if (runner.ExitCode != (int)Launcher.ExitCodeEnum.Passed) { runDesc.ErrorDesc = Resources.LrAnalysisRunTimeError; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; } //using (StreamReader reader = runner.StandardOutput) //{ // string result = reader.ReadToEnd(); // ConsoleWriter.WriteLine(Resources.LrAnlysisResults); // ConsoleWriter.WriteLine(""); // ConsoleWriter.WriteLine(result); //} } else { runDesc.ErrorDesc = Resources.LrAnlysisInitFail; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; } }
public void Should_find_test_delta_since_last_pop_with_different_runners() { var locator = new AutoTest.Core.Messaging.MessageConsumers.RemovedTestsLocator(_runResultCache); _runResultCache.EnabledDeltas(); var infos = new AutoTest.Core.Messaging.MessageConsumers.TestRunInfo[] { new AutoTest.Core.Messaging.MessageConsumers.TestRunInfo(new AutoTest.Core.Caching.Projects.Project("project", new AutoTest.Core.Caching.Projects.ProjectDocument(AutoTest.Core.Caching.Projects.ProjectType.CSharp)), "assembly") }; infos[0].AddTestsToRun(new AutoTest.Core.Messaging.MessageConsumers.TestToRun[] { new AutoTest.Core.Messaging.MessageConsumers.TestToRun(TestRunner.NUnit, "Test name") }); infos[0].ShouldOnlyRunSpcifiedTestsFor(TestRunner.NUnit); infos[0].ShouldOnlyRunSpcifiedTestsFor(TestRunner.MSTest); var runResults = new TestRunResults("project", "assembly", true, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Message 1", new IStackLine[] { }) }); runResults = locator.SetRemovedTestsAsPassed(runResults, infos); runResults.Passed.Length.ShouldEqual(0); _runResultCache.Merge(runResults); var deltas = _runResultCache.PopDeltas(); deltas.AddedTests.Length.ShouldEqual(1); deltas.RemovedTests.Length.ShouldEqual(0); runResults = new TestRunResults("project", "assembly", false, TestRunner.XUnit, new TestResult[] { }); runResults = locator.SetRemovedTestsAsPassed(runResults, infos); _runResultCache.Merge(runResults); deltas = _runResultCache.PopDeltas(); deltas.AddedTests.Length.ShouldEqual(0); deltas.RemovedTests.Length.ShouldEqual(0); }
public void Should_find_test_deltas_in_same_status() { _runResultCache.EnabledDeltas(); var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] { }) }); _runResultCache.Merge(runResults); _runResultCache.PopDeltas(); runResults = new TestRunResults("project", "assembly", true, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message 2", new IStackLine[] { }) }); _runResultCache.Merge(runResults); var deltas = _runResultCache.PopDeltas(); deltas.AddedTests.Length.ShouldEqual(1); deltas.AddedTests[0].Value.Name.ShouldEqual("Test name"); deltas.AddedTests[0].Value.Status.ShouldEqual(TestRunStatus.Ignored); deltas.AddedTests[0].Value.Message.ShouldEqual("Message 2"); deltas.RemovedTests.Length.ShouldEqual(1); deltas.RemovedTests[0].Value.Name.ShouldEqual("Test name"); }
/// <summary> /// Runs the provided test on all the environments. /// </summary> /// <param name="testInfo"> The test information. </param> /// <param name="errorReason"> failure reason </param> /// <param name="runCancelled"> delegate to RunCancelled </param> /// <returns> /// The run results for the current test. /// </returns> public TestRunResults RunTest(TestInfo testInfo, ref string errorReason, RunCancelledDelegate runCancelled) { // change the DCOM setting for qtp application Helper.ChangeDCOMSettingToInteractiveUser(); testInfo.ReportPath = testInfo.TestPath + @"\ParallelReport"; // this is to make sure that we do not overwrite the report // when we run the same test multiple times on the same build string resFolder = Helper.GetNextResFolder(testInfo.ReportPath, "Res"); var runResults = new TestRunResults { ReportLocation = testInfo.ReportPath, ErrorDesc = errorReason, TestState = TestState.Unknown, TestPath = testInfo.TestPath, TestType = TestType.ParallelRunner.ToString() }; // set the active test run ConsoleWriter.ActiveTestRun = runResults; if (!_canRun) { ConsoleWriter.WriteLine("Could not find parallel runner executable!"); errorReason = Resources.ParallelRunnerExecutableNotFound; runResults.TestState = TestState.Error; runResults.ErrorDesc = errorReason; return(runResults); } // Try to create the ParalleReport path try { Directory.CreateDirectory(runResults.ReportLocation); }catch (Exception) { errorReason = string.Format(Resources.FailedToCreateTempDirError, runResults.ReportLocation); runResults.TestState = TestState.Error; runResults.ErrorDesc = errorReason; Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed; return(runResults); } ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " => Using ParallelRunner to execute test: " + testInfo.TestPath); _runCancelled = runCancelled; // prepare the json file for the process var configFilePath = string.Empty; try { configFilePath = ParallelRunnerEnvironmentUtil.GetConfigFilePath(testInfo, _mcConnectionInfo, _environments); _configFiles.Add(configFilePath); }catch (ParallelRunnerConfigurationException ex) // invalid configuration { errorReason = ex.Message; runResults.ErrorDesc = errorReason; runResults.TestState = TestState.Error; return(runResults); } // Parallel runner argument "-c" for config path and "-o static" so that // the output from ParallelRunner is compatible with Jenkins var arguments = String.Format(ParallelRunnerArguments, configFilePath); // the test can be started now runResults.TestState = TestState.Running; var runTime = new Stopwatch(); runTime.Start(); string failureReason = null; runResults.ErrorDesc = null; // execute parallel runner and get the run result status int exitCode = ExecuteProcess(_parallelRunnerPath, arguments, ref failureReason); // set the status of the build based on the exit code RunResultsFromParallelRunnerExitCode(runResults, exitCode, failureReason, ref errorReason); // update the run time runResults.Runtime = runTime.Elapsed; // update the report location as the report should be // generated by now runResults.ReportLocation = resFolder; return(runResults); }
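The method above builds its command line with String.Format(ParallelRunnerArguments, configFilePath); per the comment in the snippet, the flags are "-c" for the config file path and "-o static" for Jenkins-compatible output. A plausible definition of the template is sketched below; the exact quoting and flag order are assumptions.

// Hedged sketch of the argument template assumed above; flag names come from the snippet's comment,
// the quoting around the config path placeholder is an assumption.
private const string ParallelRunnerArguments = "-c \"{0}\" -o static";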
public void Should_run_tests() { _project.Value.SetAsNUnitTestContainer(); _project.Value.SetOutputPath(""); _project.Value.SetAssemblyName("someProject.dll"); _listGenerator.Stub(l => l.Generate(null)).IgnoreArguments().Return(new string[] { "some file.csproj" }); _configuration.Stub(c => c.BuildExecutable(_project.Value)).Return("invalid_to_not_run_builds.exe"); _testRunner.Stub(t => t.CanHandleTestFor(_project.Value)).Return(true); var result = new TestRunResults[] { new TestRunResults("", "", false, new TestResult[] { }) }; _testRunner.Stub(t => t.RunTests(new TestRunInfo[] { new TestRunInfo(_project, "") })).IgnoreArguments() .Return(result); _removedTestLocator.Stub(r => r.SetRemovedTestsAsPassed(null, null)).IgnoreArguments().Return(result[0]); var message = new ProjectChangeMessage(); message.AddFile(new ChangedFile("some file.csproj")); _consumer.Consume(message); _testRunner.AssertWasCalled(t => t.RunTests(new TestRunInfo[] { new TestRunInfo(null, "") }), t => t.IgnoreArguments()); }
public static void Main(string[] args) { PrintUsage(); TestRunScheduler scheduler = new TestRunScheduler(); bool exit = false; while (!exit) { string userInput = Console.ReadLine(); string invokedVerb = null; object invokedVerbInstance = null; var options = new Options(); if (!CommandLine.Parser.Default.ParseArguments(userInput.Split(' '), options, (verb, subOptions) => { invokedVerb = verb; invokedVerbInstance = subOptions; })) { Console.WriteLine($"Unrecognized command: {userInput}."); PrintUsage(); continue; } switch (invokedVerb) { case "exit": exit = true; Console.WriteLine("Exiting test runner."); scheduler.CancelAllTestRuns(); continue; case "start": var startRunOptions = (StartNewTestRunOptions)invokedVerbInstance; Console.WriteLine($"Starting test run of {startRunOptions.TestSuiteName}."); Guid testRunId = scheduler.StartNewTestRun(startRunOptions.TestSuiteName); Console.WriteLine($"Started test run with id {testRunId}."); break; case "status": var statusOptions = (CommonOptions)invokedVerbInstance; try { Console.WriteLine($"Retrieving status of test run with id {statusOptions.TestRunGuid}."); TestRunStatus status = scheduler.GetTestRunStatus(Guid.Parse(statusOptions.TestRunGuid)); Console.WriteLine(status); } catch (TestRunNotFoundException ex) { Console.WriteLine("Failed to retrieve status of test run:"); Console.WriteLine(ex.Message); } break; case "results": var resultsOptions = (CommonOptions)invokedVerbInstance; try { Console.WriteLine($"Retrieving results of test run with id {resultsOptions.TestRunGuid}."); TestRunResults results = scheduler.GetTestRunResults(Guid.Parse(resultsOptions.TestRunGuid)); Console.WriteLine(results); } catch (TestRunNotFoundException ex) { Console.WriteLine("Failed to retrieve results of test run:"); Console.WriteLine(ex.Message); } break; case "cancel": var cancelRunOptions = (CommonOptions)invokedVerbInstance; try { Console.WriteLine($"Canceling test run with id {cancelRunOptions.TestRunGuid}."); scheduler.CancelTestRun(Guid.Parse(cancelRunOptions.TestRunGuid)); Console.WriteLine("Test run canceled."); } catch (TestRunNotFoundException ex) { Console.WriteLine("Failed to cancel test run:"); Console.WriteLine(ex.Message); } break; } } }
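The command loop above parses verbs through the CommandLine 1.9-style ParseArguments(args, options, onVerbCommand) overload and then casts the verb instance to StartNewTestRunOptions or CommonOptions. A rough sketch of option classes compatible with that usage follows; the class and property names come from the snippet, while the attribute placement, short/long option names, and help texts are assumptions (the exit verb, which takes no arguments of its own, is omitted).

// Hedged sketch of verb option classes matching the usage above (CommandLine 1.9-style attributes).
// Only the class and property names are taken from the snippet; everything else is an assumption.
class Options
{
    [VerbOption("start", HelpText = "Start a new test run.")]
    public StartNewTestRunOptions StartVerb { get; set; }

    [VerbOption("status", HelpText = "Show the status of a test run.")]
    public CommonOptions StatusVerb { get; set; }

    [VerbOption("results", HelpText = "Show the results of a test run.")]
    public CommonOptions ResultsVerb { get; set; }

    [VerbOption("cancel", HelpText = "Cancel a test run.")]
    public CommonOptions CancelVerb { get; set; }
}

class StartNewTestRunOptions
{
    [Option('s', "suite", Required = true, HelpText = "Name of the test suite to run.")]
    public string TestSuiteName { get; set; }
}

class CommonOptions
{
    [Option('i', "id", Required = true, HelpText = "Id of the test run.")]
    public string TestRunGuid { get; set; }
}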
public void Should_merge_ignored_tests() { var results = new TestResult[] { new TestResult(TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] { }) }; var runResults = new TestRunResults("project", "assembly", results); _runResultCache.Merge(runResults); results = new TestResult[] { new TestResult(TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] { }) }; runResults = new TestRunResults("project", "assembly", results); _runResultCache.Merge(runResults); _runResultCache.Ignored.Length.ShouldEqual(1); }
public void Should_merge_tests_going_from_ignored_to_failed() { var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] { }) }); _runResultCache.Merge(runResults); runResults = new TestRunResults("project", "assembly", true, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Another message", new IStackLine[] { }) }); _runResultCache.Merge(runResults); _runResultCache.Ignored.Length.ShouldEqual(0); _runResultCache.Failed.Length.ShouldEqual(1); }
private void removeChanged(TestRunResults results) { _failed.RemoveAll(x => x.Value.Runner == TestRunner.Any); _ignored.RemoveAll(x => x.Value.Runner == TestRunner.Any); foreach (var test in results.Passed) { var item = new TestItem(results.Assembly, results.Project, test); removeIfExists(item, _ignored); removeIfExists(item, _failed); } moveTestsBetweenStates(results, results.Failed, _ignored); moveTestsBetweenStates(results, results.Ignored, _failed); }
public void Should_merge_changed_tests_from_the_same_category() { var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Message", new IStackLine[] { }) }); _runResultCache.Merge(runResults); runResults = new TestRunResults("project", "assembly", true, TestRunner.NUnit, new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Message", new IStackLine[] { new StackLineMessage("method", "file", 10) }) }); _runResultCache.Merge(runResults); _runResultCache.Failed.Length.ShouldEqual(1); _runResultCache.Failed[0].Value.StackTrace.Length.ShouldEqual(1); }
private void moveTestsBetweenStates(TestRunResults results, TestResult[] newSstate, Dictionary<int, TestItem> oldState) { foreach (var test in newSstate) { var item = new TestItem(results.Assembly, results.Project, test); var changed = oldState .Where(x => x.Value.IsTheSameTestAs(item)) .Select(x => x.Key) .ToArray(); if (changed.Length > 0) { //logTest("Removing test that changed state ", item); foreach (var changedItem in changed) oldState.Remove(changedItem); } } }
public void RunFinished(TestRunResults[] resultset) { _isFullRun = false; // Reset always. Will be set again if needed _lastFailures.Clear(); ThreadPool.QueueUserWorkItem(x => LoadProfilerData(null)); }
public TestRunResults RunTest(TestInfo scenarioInf, ref string errorReason, RunCancelledDelegate runCancelled) { string scenarioPath = scenarioInf.TestPath; //prepare the instance that will contain test results for JUnit TestRunResults runDesc = new TestRunResults(); ConsoleWriter.ActiveTestRun = runDesc; ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running: " + scenarioPath); runDesc.TestType = TestType.LoadRunner.ToString(); _resultsFolder = Helper.GetTempDir(); //a directory with this name may already exist. try to delete it. if (Directory.Exists(_resultsFolder)) { try { // Directory.Delete(_resultsFolder, true); DirectoryInfo dir = new DirectoryInfo(_resultsFolder); dir.GetFiles().ToList().ForEach(file => file.Delete()); dir.GetDirectories().ToList().ForEach(subdir => subdir.Delete()); } catch (Exception) { Console.WriteLine(string.Format(Resources.CannotDeleteReportFolder, _resultsFolder)); } } else { try { Directory.CreateDirectory(_resultsFolder); } catch (Exception e) { errorReason = string.Format(Resources.FailedToCreateTempDirError, _resultsFolder); runDesc.TestState = TestState.Error; runDesc.ErrorDesc = errorReason; Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed; return runDesc; } } //create LRR folder: _controller_result_dir = Path.Combine(_resultsFolder, LRR_FOLDER); Directory.CreateDirectory(_controller_result_dir); //init result params runDesc.ErrorDesc = errorReason; runDesc.TestPath = scenarioPath; runDesc.TestState = TestState.Unknown; if (!Helper.isLoadRunnerInstalled()) { runDesc.TestState = TestState.Error; runDesc.ErrorDesc = string.Format(Resources.LoadRunnerNotInstalled, System.Environment.MachineName); ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed; return runDesc; } //from here on, we may delegate runCancelled(). _runCancelled = runCancelled; //start scenario stop watch Stopwatch scenarioStopWatch = Stopwatch.StartNew(); //set state to running runDesc.TestState = TestState.Running; //and run the scenario bool res = runScenario(scenarioPath, ref errorReason, runCancelled); if (!res) { //runScenario failed. print the error and set test as failed ConsoleWriter.WriteErrLine(errorReason); runDesc.TestState = TestState.Error; runDesc.ErrorDesc = errorReason; runDesc.Runtime = scenarioStopWatch.Elapsed; //and try to close the controller closeController(); return runDesc; } else { try { ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator); runDesc.ReportLocation = _resultsFolder; ConsoleWriter.WriteLine(Resources.LrAnalysingResults); //close the controller, so Analysis can be opened ConsoleWriter.WriteLine("closing Controller"); closeController(); ConsoleWriter.WriteLine("Controller closed"); //generate report using Analysis: ConsoleWriter.WriteLine("calling analysis report generator"); generateAnalysisReport(runDesc); ConsoleWriter.WriteLine("analysis report generator finished"); //check for errors: if (File.Exists(Path.Combine(_resultsFolder, "errors.xml"))) { checkForErrors(); } ConsoleWriter.WriteLine(Resources.LRErrorsSummary); //count how many ignorable errors and how many fatal errors occurred. int ignore = getErrorsCount(ERRORState.Ignore); int fatal = getErrorsCount(ERRORState.Error); ConsoleWriter.WriteLine(String.Format(Resources.LrErrorSummeryNum, ignore, fatal)); ConsoleWriter.WriteLine(""); if (_errors != null && _errors.Count > 0) { foreach (ERRORState state in Enum.GetValues(typeof(ERRORState))) { ConsoleWriter.WriteLine(printErrorSummary(state)); } } //if scenario ended with fatal errors, change test state if (fatal > 0) { ConsoleWriter.WriteErrLine(string.Format(Resources.LRTestFailDueToFatalErrors, fatal)); errorReason = buildErrorReasonForErrors(); runDesc.TestState = TestState.Failed; } else if (ignore > 0) { ConsoleWriter.WriteLine(string.Format(Resources.LRTestWarningDueToIgnoredErrors, ignore)); runDesc.HasWarnings = true; runDesc.TestState = TestState.Warning; } else { Console.WriteLine(Resources.LRTestPassed); runDesc.TestState = TestState.Passed; } } catch (Exception e) { ConsoleWriter.WriteException(Resources.LRExceptionInAnalysisRunner, e); runDesc.TestState = TestState.Error; runDesc.ErrorDesc = Resources.LRExceptionInAnalysisRunner; runDesc.Runtime = scenarioStopWatch.Elapsed; } //runDesc.ReportLocation = _resultsFolder; } runDesc.Runtime = scenarioStopWatch.Elapsed; if (!string.IsNullOrEmpty(errorReason)) runDesc.ErrorDesc = errorReason; closeController(); return runDesc; }
public void Should_remove_cached_failed_tests_that_now_passes() { var results = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Message", new IStackLine[] { }) }; var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); results = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Passed, "Test name", "", new IStackLine[] { }) }; runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); Assert.AreEqual(0, _runResultCache.Failed.Length); _runResultCache.Failed.Length.ShouldEqual(0); }
public void CloseReport(TestRunResults testRunResluts) { throw new NotImplementedException(); }
public void Should_add_ignored_tests() { var results = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test name", "Message", new IStackLine[] {}) }; var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); _runResultCache.Ignored.Length.ShouldEqual(1); _runResultCache.Ignored[0].Key.ShouldEqual("assembly"); _runResultCache.Ignored[0].Project.ShouldEqual("project"); }
private void generateAnalysisReport(TestRunResults runDesc) { string lrrLocation = Path.Combine(runDesc.ReportLocation, LRR_FOLDER, LRR_FOLDER + ".lrr"); string lraLocation = Path.Combine(runDesc.ReportLocation, LRA_FOLDER, LRA_FOLDER + ".lra"); string htmlLocation = Path.Combine(runDesc.ReportLocation, HTML_FOLDER, HTML_FOLDER + ".html"); ProcessStartInfo analysisRunner = new ProcessStartInfo(); analysisRunner.FileName = ANALYSIS_LAUNCHER; analysisRunner.Arguments = "\"" + lrrLocation + "\" \"" + lraLocation + "\" \"" + htmlLocation + "\" \"" + _analysisTemplate + "\""; analysisRunner.UseShellExecute = false; analysisRunner.RedirectStandardOutput = true; ConsoleWriter.WriteLine("executing Analysis launcher with arguments : " + analysisRunner.Arguments); ConsoleWriter.WriteLine("time for analysis: " + _perScenarioTimeOutMinutes.ToString(@"dd\:\:hh\:mm\:ss")); analysisRunner.RedirectStandardOutput = true; analysisRunner.RedirectStandardError = true; Process runner = Process.Start(analysisRunner); if (runner != null) { runner.OutputDataReceived += runner_OutputDataReceived; runner.ErrorDataReceived += runner_ErrorDataReceived; runner.BeginOutputReadLine(); runner.BeginErrorReadLine(); Stopwatch analysisStopWatch = Stopwatch.StartNew(); while (!runner.WaitForExit(_pollingInterval * 1000) && analysisStopWatch.Elapsed < _perScenarioTimeOutMinutes) { ; } analysisStopWatch.Stop(); runner.CancelOutputRead(); runner.CancelErrorRead(); ConsoleWriter.WriteLine("time passed: " + analysisStopWatch.Elapsed.ToString(@"dd\:\:hh\:mm\:ss")); if (analysisStopWatch.Elapsed > _perScenarioTimeOutMinutes) { runDesc.ErrorDesc = Resources.LrAnalysisTimeOut; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; if (!runner.HasExited) { runner.Kill(); } } //ConsoleWriter.WriteLine("checking error code"); if (runner.ExitCode != (int)Launcher.ExitCodeEnum.Passed) { runDesc.ErrorDesc = Resources.LrAnalysisRunTimeError; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; } //using (StreamReader reader = runner.StandardOutput) //{ // string result = reader.ReadToEnd(); // ConsoleWriter.WriteLine(Resources.LrAnlysisResults); // ConsoleWriter.WriteLine(""); // ConsoleWriter.WriteLine(result); //} } else { runDesc.ErrorDesc = Resources.LrAnlysisInitFail; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; } }
/// <summary> /// Runs the provided test on all the environments. /// </summary> /// <param name="testInfo"> The test information. </param> /// <param name="errorReason"> failure reason </param> /// <param name="runCancelled"> delegate to RunCancelled </param> /// <returns> /// The run results for the current test. /// </returns> public TestRunResults RunTest(TestInfo testInfo, ref string errorReason, RunCancelledDelegate runCancelled) { ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running in parallel: " + testInfo.TestPath); if (string.IsNullOrWhiteSpace(testInfo.ReportPath)) { // maybe the report base directory is set, if so, // the report path for parallel runner shall be populated here if (!string.IsNullOrWhiteSpace(testInfo.ReportBaseDirectory)) { // "<report-base-dir>\<test-name>_ParallelReport" testInfo.ReportPath = Path.Combine(testInfo.ReportBaseDirectory, testInfo.TestName.Substring(testInfo.TestName.LastIndexOf('\\') + 1) + "_ParallelReport"); ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Report path is generated under base directory: " + testInfo.ReportPath); } else { // neither ReportPath nor ReportBaseDirectory is given, use default report path: // "<TestPath>\ParallelReport" testInfo.ReportPath = testInfo.TestPath + @"\ParallelReport"; ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Report path is automatically generated: " + testInfo.ReportPath); } } else { ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Report path is set explicitly: " + testInfo.ReportPath); } // this is to make sure that we do not overwrite the report // when we run the same test multiple times on the same build string resFolder = Helper.GetNextResFolder(testInfo.ReportPath, "Res"); var runResults = new TestRunResults { ReportLocation = testInfo.ReportPath, ErrorDesc = errorReason, TestState = TestState.Unknown, TestPath = testInfo.TestPath, TestType = TestType.ParallelRunner.ToString() }; // set the active test run ConsoleWriter.ActiveTestRun = runResults; if (!_canRun) { ConsoleWriter.WriteLine("Could not find parallel runner executable!"); errorReason = Resources.ParallelRunnerExecutableNotFound; runResults.TestState = TestState.Error; runResults.ErrorDesc = errorReason; return(runResults); } // change the DCOM setting for qtp application Helper.ChangeDCOMSettingToInteractiveUser(); // try to check if the UFT process already exists bool uftProcessExist = false; bool isNewInstance; using (Mutex m = new Mutex(true, "per_process_mutex_UFT", out isNewInstance)) { if (!isNewInstance) { uftProcessExist = true; } } // try to get qtp status via qtp automation object since the uft process exists if (uftProcessExist) { var type = Type.GetTypeFromProgID("Quicktest.Application"); var qtpApplication = Activator.CreateInstance(type) as QTObjectModelLib.Application; bool needKillUFTProcess = false; // status: Not launched / Ready / Busy / Running / Recording / Waiting / Paused string status = qtpApplication.GetStatus(); switch (status) { case "Not launched": if (uftProcessExist) { // UFT process exist but the status retrieved from qtp automation object is Not launched // it means the UFT is launched but not shown the main window yet // in which case it shall be considered as UFT is not used at all // so here can kill the UFT process to continue needKillUFTProcess = true; } break; case "Ready": case "Waiting": // UFT is launched but not running or recording, shall be considered as UFT is not used // so here can kill the UFT process to continue needKillUFTProcess = true; break; case "Busy": case "Running": case "Recording": case "Paused": // UFT is launched and somehow in use now, shouldn't kill UFT process // here make the test fail errorReason = Resources.UFT_Running; runResults.ErrorDesc = errorReason; runResults.TestState = TestState.Error; return(runResults); default: // by default, let the tool run test, the behavior might be unexpected break; } if (needKillUFTProcess) { Process[] procs = Process.GetProcessesByName("uft"); if (procs != null) { foreach (Process proc in procs) { proc.Kill(); } } } } // Try to create the ParallelReport path try { Directory.CreateDirectory(runResults.ReportLocation); } catch (Exception) { errorReason = string.Format(Resources.FailedToCreateTempDirError, runResults.ReportLocation); runResults.TestState = TestState.Error; runResults.ErrorDesc = errorReason; Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed; return(runResults); } ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " => Using ParallelRunner to execute test: " + testInfo.TestPath); _runCancelled = runCancelled; // prepare the json file for the process var configFilePath = string.Empty; try { configFilePath = ParallelRunnerEnvironmentUtil.GetConfigFilePath(testInfo, _mcConnectionInfo, _environments); _configFiles.Add(configFilePath); } catch (ParallelRunnerConfigurationException ex) // invalid configuration { errorReason = ex.Message; runResults.ErrorDesc = errorReason; runResults.TestState = TestState.Error; return(runResults); } // Parallel runner argument "-c" for config path and "-o static" so that // the output from ParallelRunner is compatible with Jenkins var arguments = String.Format(ParallelRunnerArguments, configFilePath); // the test can be started now runResults.TestState = TestState.Running; var runTime = new Stopwatch(); runTime.Start(); string failureReason = null; runResults.ErrorDesc = null; // execute parallel runner and get the run result status int exitCode = ExecuteProcess(_parallelRunnerPath, arguments, ref failureReason); // set the status of the build based on the exit code RunResultsFromParallelRunnerExitCode(runResults, exitCode, failureReason, ref errorReason); // update the run time runResults.Runtime = runTime.Elapsed; // update the report location as the report should be // generated by now runResults.ReportLocation = resFolder; return(runResults); }
public TestRunResults RunTest(TestInfo scenarioInf, ref string errorReason, RunCancelledDelegate runCancelled) { string scenarioPath = scenarioInf.TestPath; //prepare the instance that will contain test results for JUnit TestRunResults runDesc = new TestRunResults(); ConsoleWriter.ActiveTestRun = runDesc; ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running: " + scenarioPath); runDesc.TestType = TestType.LoadRunner.ToString(); _resultsFolder = Helper.GetTempDir(); //a directory with this name may already exist. try to delete it. if (Directory.Exists(_resultsFolder)) { try { // Directory.Delete(_resultsFolder, true); DirectoryInfo dir = new DirectoryInfo(_resultsFolder); dir.GetFiles().ToList().ForEach(file => file.Delete()); dir.GetDirectories().ToList().ForEach(subdir => subdir.Delete()); } catch (Exception) { Console.WriteLine(string.Format(Resources.CannotDeleteReportFolder, _resultsFolder)); } } else { try { Directory.CreateDirectory(_resultsFolder); } catch (Exception e) { errorReason = string.Format(Resources.FailedToCreateTempDirError, _resultsFolder); runDesc.TestState = TestState.Error; runDesc.ErrorDesc = errorReason; Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed; return(runDesc); } } //create LRR folder: _controller_result_dir = Path.Combine(_resultsFolder, LRR_FOLDER); Directory.CreateDirectory(_controller_result_dir); //init result params runDesc.ErrorDesc = errorReason; runDesc.TestPath = scenarioPath; ConsoleWriter.WriteLine(runDesc.TestPath); runDesc.TestState = TestState.Unknown; if (!Helper.isLoadRunnerInstalled()) { runDesc.TestState = TestState.Error; runDesc.ErrorDesc = string.Format(Resources.LoadRunnerNotInstalled, System.Environment.MachineName); ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed; return(runDesc); } //from here on, we may delegate runCancelled(). _runCancelled = runCancelled; //start scenario stop watch Stopwatch scenarioStopWatch = Stopwatch.StartNew(); //set state to running runDesc.TestState = TestState.Running; //and run the scenario bool res = runScenario(scenarioPath, ref errorReason, runCancelled); if (!res) { //runScenario failed. print the error and set test as failed ConsoleWriter.WriteErrLine(errorReason); runDesc.TestState = TestState.Error; runDesc.ErrorDesc = errorReason; runDesc.Runtime = scenarioStopWatch.Elapsed; //and try to close the controller closeController(); return(runDesc); } else { try { ConsoleWriter.WriteLine(Resources.GeneralDoubleSeperator); runDesc.ReportLocation = _resultsFolder; ConsoleWriter.WriteLine(Resources.LrAnalysingResults); //close the controller, so Analysis can be opened ConsoleWriter.WriteLine("closing Controller"); closeController(); ConsoleWriter.WriteLine("Controller closed"); //generate report using Analysis: ConsoleWriter.WriteLine("calling analysis report generator"); generateAnalysisReport(runDesc); ConsoleWriter.WriteLine("analysis report generator finished"); //check for errors: if (File.Exists(Path.Combine(_resultsFolder, "errors.xml"))) { checkForErrors(); } ConsoleWriter.WriteLine(Resources.LRErrorsSummary); //count how many ignorable errors and how many fatal errors occurred. int ignore = getErrorsCount(ERRORState.Ignore); int fatal = getErrorsCount(ERRORState.Error); runDesc.FatalErrors = fatal; ConsoleWriter.WriteLine(String.Format(Resources.LrErrorSummeryNum, ignore, fatal)); ConsoleWriter.WriteLine(""); if (_errors != null && _errors.Count > 0) { foreach (ERRORState state in Enum.GetValues(typeof(ERRORState))) { ConsoleWriter.WriteLine(printErrorSummary(state)); } } //if scenario ended with fatal errors, change test state if (fatal > 0) { ConsoleWriter.WriteErrLine(string.Format(Resources.LRTestFailDueToFatalErrors, fatal)); errorReason = buildErrorReasonForErrors(); runDesc.TestState = TestState.Failed; } else if (ignore > 0) { ConsoleWriter.WriteLine(string.Format(Resources.LRTestWarningDueToIgnoredErrors, ignore)); runDesc.HasWarnings = true; runDesc.TestState = TestState.Warning; } else { Console.WriteLine(Resources.LRTestPassed); runDesc.TestState = TestState.Passed; } } catch (Exception e) { ConsoleWriter.WriteException(Resources.LRExceptionInAnalysisRunner, e); runDesc.TestState = TestState.Error; runDesc.ErrorDesc = Resources.LRExceptionInAnalysisRunner; runDesc.Runtime = scenarioStopWatch.Elapsed; } //runDesc.ReportLocation = _resultsFolder; } runDesc.Runtime = scenarioStopWatch.Elapsed; if (!string.IsNullOrEmpty(errorReason)) { runDesc.ErrorDesc = errorReason; } KillController(); return(runDesc); }
public void Should_rerun_test_if_pre_processor_says_so() { _runInfo.ShouldNotBuild(); _project.Value.SetOutputPath(""); _project.Value.SetAssemblyName("someProject.dll"); var info = new TestRunInfo(_project, ""); _listGenerator.Stub(l => l.Generate(null)).IgnoreArguments().Return(new string[] { "some file.csproj" }); _configuration.Stub(c => c.BuildExecutable(_project.Value)).Return("invalid_to_not_run_builds.exe"); var result = new TestRunResults[] { new TestRunResults("", "", false, TestRunner.NUnit, new TestResult[] { }) }; _testRunner.Stub(t => t.CanHandleTestFor(info.Assembly)).IgnoreArguments().Return(true); _testRunner.Stub(t => t.RunTests(new TestRunInfo[] { info })).IgnoreArguments() .Return(result); _runInfo.ShouldRerunAllTestWhenFinishedFor(TestRunner.Any); _removedTestLocator.Stub(r => r.SetRemovedTestsAsPassed(null, null)).IgnoreArguments().Return(result[0]); _testAssemblyValidator.Stub(t => t.ShouldNotTestAssembly("")).IgnoreArguments().Return(false); var message = new ProjectChangeMessage(); message.AddFile(new ChangedFile("some file.csproj")); _consumer.Consume(message); _testRunner.AssertWasCalled(t => t.RunTests(new TestRunInfo[] { new TestRunInfo(null, "") }), t => t.IgnoreArguments().Repeat.Twice()); }
public void RunFinished (TestRunResults[] results) { }
private void generateAnalysisReport(TestRunResults runDesc) { string lrrLocation = Path.Combine(runDesc.ReportLocation, LRR_FOLDER, LRR_FOLDER + ".lrr"); string lraLocation = Path.Combine(runDesc.ReportLocation, LRA_FOLDER, LRA_FOLDER + ".lra"); string htmlLocation = Path.Combine(runDesc.ReportLocation, HTML_FOLDER, HTML_FOLDER + ".html"); ProcessStartInfo analysisRunner = new ProcessStartInfo(); analysisRunner.FileName = ANALYSIS_LAUNCHER; analysisRunner.Arguments = lrrLocation + " " + lraLocation + " " + htmlLocation; analysisRunner.UseShellExecute = false; analysisRunner.RedirectStandardOutput = true; Process runner = Process.Start(analysisRunner); if (runner != null) { Stopwatch analysisStopWatch = Stopwatch.StartNew(); while (!runner.WaitForExit(_pollingInterval * 1000) && analysisStopWatch.Elapsed < _perScenarioTimeOut) ; analysisStopWatch.Stop(); if (analysisStopWatch.Elapsed > _perScenarioTimeOut) { runDesc.ErrorDesc = Resources.LrAnalysisTimeOut; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; if (!runner.HasExited) { runner.Kill(); } } //ConsoleWriter.WriteLine("checking error code"); if (runner.ExitCode != (int)Launcher.ExitCodeEnum.Passed) { runDesc.ErrorDesc = Resources.LrAnalysisRunTimeError; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; } using (StreamReader reader = runner.StandardOutput) { string result = reader.ReadToEnd(); ConsoleWriter.WriteLine(Resources.LrAnlysisResults); ConsoleWriter.WriteLine(""); ConsoleWriter.WriteLine(result); } } else { runDesc.ErrorDesc = Resources.LrAnlysisInitFail; ConsoleWriter.WriteErrLine(runDesc.ErrorDesc); runDesc.TestState = TestState.Error; } }
private void removeChanged(TestRunResults results) { foreach (var test in results.Passed) { var item = new TestItem(results.Assembly, results.Project, test); removeIfExists(item, _ignored); removeIfExists(item, _failed); } moveTestsBetweenStates(results, results.Failed, _ignored); moveTestsBetweenStates(results, results.Ignored, _failed); }
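removeChanged above clears passed tests out of the cached failed and ignored lists through a removeIfExists helper. For the list-based variant of the cache (the moveTestsBetweenStates overload that takes a List<TestItem>, shown in the next snippet), a minimal sketch could look like this; the signature and body are assumptions inferred from how the cache is used.

// Hedged sketch of the removeIfExists helper referenced above, assuming the list-based cache;
// matching relies on TestItem.IsTheSameTestAs, as in the surrounding snippets.
private void removeIfExists(TestItem item, List<TestItem> list)
{
    // drop any cached entry that represents the same test as the freshly passed one
    list.RemoveAll(i => i.IsTheSameTestAs(item));
}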
private void moveTestsBetweenStates(TestRunResults results, TestResult[] newSstate, List<TestItem> oldState) { foreach (var test in newSstate) { var item = new TestItem(results.Assembly, results.Project, test); if (oldState.Exists(i => i.IsTheSameTestAs(item))) { logTest("Removing test that changed state ", item); oldState.RemoveAll(i => i.IsTheSameTestAs(item)); } } }
public void Should_not_merge_same_failed_tests_with_different_runners() { var results = new TestResult[] { new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test name", "Message", new IStackLine[] { }) }; var runResults = new TestRunResults("project", "assembly", false, TestRunner.NUnit, results); _runResultCache.Merge(runResults); results = new TestResult[] { new TestResult(TestRunner.MSTest, TestRunStatus.Failed, "Test name", "Message", new IStackLine[] { }) }; runResults = new TestRunResults("project", "assembly", false, TestRunner.MSTest, results); _runResultCache.Merge(runResults); _runResultCache.Failed.Length.ShouldEqual(2); }