private void ProcessTransforms(ChutzpahTestSettingsFile settings, TestCaseSummary overallSummary, TransformResult results)
{
    // Build the transformer lookup fresh for every settings file so any
    // per-run state held inside an individual transformer starts clean.
    var transformsByName = transformerProvider
        .GetTransformers(fileSystem)
        .ToDictionary(t => t.Name, t => t, StringComparer.InvariantCultureIgnoreCase);

    foreach (var transformConfig in settings.Transforms)
    {
        SummaryTransformer transform;
        if (!transformsByName.TryGetValue(transformConfig.Name, out transform))
        {
            // Unknown transform names are silently skipped.
            continue;
        }

        // Resolve relative output paths against the settings file's directory.
        var outputPath = transformConfig.Path;
        var settingsDirectory = transformConfig.SettingsFileDirectory;
        if (!fileSystem.IsPathRooted(outputPath) && !string.IsNullOrWhiteSpace(settingsDirectory))
        {
            outputPath = fileSystem.GetFullPath(Path.Combine(settingsDirectory, outputPath));
        }

        // TODO: In future, this would ideally split out the summary to just those parts
        // relevant to the files associated with the settings file being handled
        transform.Transform(overallSummary, outputPath);
        results.AddResult(transform.Name, outputPath);
    }
}
// Forwards suite completion to the wrapped callback; the lock serializes all
// calls so the nested callback never sees concurrent invocations.
public void TestSuiteFinished(TestCaseSummary testResultsSummary)
{
    lock (sync)
    {
        nestedCallback.TestSuiteFinished(testResultsSummary);
    }
}
private static TestCaseSummary GetTestCaseSummary()
{
    // Fixture data: one file with no line execution data, one partially
    // covered file and one fully covered file.
    var summary = new TestCaseSummary();
    summary.CoverageObject = new CoverageData();

    summary.CoverageObject["/no/coverage"] = new CoverageFileData
    {
        FilePath = "/no/coverage",
        LineExecutionCounts = null
    };

    summary.CoverageObject["/three/lines/two/covered"] = new CoverageFileData
    {
        FilePath = "/three/lines/two/covered",
        LineExecutionCounts = new int?[] { null, 2, null, 5, 0 }
    };

    summary.CoverageObject["/four/lines/four/covered"] = new CoverageFileData
    {
        FilePath = "/four/lines/four/covered",
        LineExecutionCounts = new int?[] { null, 2, 3, 4, 5 }
    };

    return summary;
}
public override void TestSuiteFinished(TestCaseSummary testResultsSummary)
{
    // Print coverage details first when any coverage was collected.
    var coverage = testResultsSummary.CoverageObject;
    if (coverage != null && coverage.Any())
    {
        Console.WriteLine();
        PrintCodeCoverageResults(coverage);
    }

    if (showFailureReport)
    {
        PrintErrorReport(testResultsSummary);
    }

    Console.WriteLine();

    // TotalRuntime is tracked in milliseconds; report in seconds.
    var elapsedSeconds = testResultsSummary.TotalRuntime / 1000.0;
    if (testResultsSummary.SkippedCount > 0)
    {
        Console.WriteLine(
            "=== {0} total, {1} failed, {2} skipped, took {3:n} seconds ===",
            testResultsSummary.TotalCount,
            testResultsSummary.FailedCount,
            testResultsSummary.SkippedCount,
            elapsedSeconds);
    }
    else
    {
        Console.WriteLine(
            "=== {0} total, {1} failed, took {2:n} seconds ===",
            testResultsSummary.TotalCount,
            testResultsSummary.FailedCount,
            elapsedSeconds);
    }

    base.TestSuiteFinished(testResultsSummary);
}
private static TestCaseSummary BuildTestCaseSummary()
{
    // Fixture with two files: a plain path and a path/test/message containing
    // XML-special characters, so escaping behavior can be exercised.
    var summary = new TestCaseSummary();

    var firstFile = new TestFileSummary("path1") { TimeTaken = 1500 };
    firstFile.AddTestCase(new TestCase
    {
        ModuleName = "module1",
        TestName = "test1",
        TestResults = new List<TestResult> { new TestResult { Passed = false, Message = "some failure" } }
    });
    firstFile.AddTestCase(new TestCase
    {
        ModuleName = "module1",
        TestName = "test2",
        TestResults = new List<TestResult> { new TestResult { Passed = true } }
    });

    var secondFile = new TestFileSummary("path>2") { TimeTaken = 2000 };
    secondFile.AddTestCase(new TestCase
    {
        TestName = "test3",
        TestResults = new List<TestResult> { new TestResult { Passed = true } }
    });
    secondFile.AddTestCase(new TestCase
    {
        TestName = "test<4",
        TestResults = new List<TestResult> { new TestResult { Passed = false, Message = "bad<failure" } }
    });

    summary.Append(firstFile);
    summary.Append(secondFile);
    return summary;
}
private void PrintErrorReport(TestCaseSummary testResultsSummary)
{
    // A test is failed when any one of its result objects did not pass.
    var failedTests = testResultsSummary.Tests.Where(t => !t.ResultsAllPassed).ToList();
    var fileErrors = testResultsSummary.Errors;

    // Print nothing at all when there is nothing to report.
    if (failedTests.Count == 0 && fileErrors.Count == 0)
    {
        return;
    }

    Console.WriteLine("\n\n--- Failure Report :: {0} Failed Tests, {1} File Errors ---", failedTests.Count, fileErrors.Count);

    foreach (var fileError in fileErrors)
    {
        FileError(fileError);
    }

    foreach (var failedTest in failedTests)
    {
        TestFailed(failedTest);
    }

    Console.WriteLine("-----------------------------------------");
}
public override string Transform(TestCaseSummary testFileSummary)
{
    if (testFileSummary == null) throw new ArgumentNullException("testFileSummary");

    var document = new System.Xml.XmlDocument();
    var testResults = AddTestResultsRoot(testFileSummary, document);

    foreach (var testFile in testFileSummary.TestFileSummaries)
    {
        // One suite per test file; times are converted from ms to seconds.
        var fileExecuted = testFile.PassedCount + testFile.FailedCount == testFile.TotalCount;
        var fileSucceeded = testFile.PassedCount == testFile.TotalCount;
        var testSuite = AddTestSuite(
            document,
            testResults,
            testFile.Path,
            testFile.PassedCount,
            testFile.TimeTaken / 1000m,
            fileExecuted,
            fileSucceeded);
        var results = testSuite.FirstChild;

        // Nested suite per test group, with per-group aggregate counts.
        foreach (var group in testFile.TestGroups)
        {
            var testsInGroup = group.Value;
            Int32 total = testsInGroup.Count;
            Int32 passed = testsInGroup.Count(t => t.ResultsAllPassed);
            decimal time = testsInGroup.Sum(t => t.TimeTaken) / 1000m;
            var groupSuite = AddTestSuite(document, results, group.Key, passed, time, total > 0, total == passed);

            foreach (var test in testsInGroup)
            {
                AddTestCase(test, groupSuite.FirstChild, document);
            }
        }
    }

    return GenerateReportXml(document);
}
public override void TestSuiteFinished(TestCaseSummary testResultsSummary)
{
    var passed = testResultsSummary.PassedCount;
    var failed = testResultsSummary.FailedCount;
    var total = testResultsSummary.TotalCount;

    // Short form for the status bar, longer banner for the output pane.
    var statusBarText = string.Format("{0} passed, {1} failed, {2} total", passed, failed, total);
    var text = string.Format("========== Total Tests: {0} passed, {1} failed, {2} total ==========\n", passed, failed, total);

    testPane.OutputString(text);
    SetStatusBarMessage(statusBarText);
}
public void Transform(TestCaseSummary testFileSummary, string outFile)
{
    // Validate both arguments before doing any work.
    if (testFileSummary == null)
    {
        throw new ArgumentNullException("testFileSummary");
    }
    if (string.IsNullOrEmpty(outFile))
    {
        throw new ArgumentNullException("outFile");
    }

    // Render the summary and persist it to the requested location.
    var rendered = Transform(testFileSummary);
    File.WriteAllText(outFile, rendered);
}
public override void TestSuiteFinished(TestCaseSummary testResultsSummary)
{
    // Persist coverage artifacts whenever coverage data was gathered.
    var coverage = testResultsSummary.CoverageObject;
    if (coverage != null)
    {
        WriteCoverageFiles(coverage);
    }

    base.TestSuiteFinished(testResultsSummary);
}
public void ProcessTransforms(IEnumerable<TestContext> testContexts, TestCaseSummary overallSummary)
{
    // Several contexts may share one settings file; run its transforms once.
    var distinctSettings = testContexts.Select(context => context.TestFileSettings).Distinct();
    foreach (var settings in distinctSettings)
    {
        var hasTransforms = settings.Transforms != null && settings.Transforms.Any();
        if (hasTransforms)
        {
            ProcessTransforms(settings, overallSummary);
        }
    }
}
public virtual void Transform(TestCaseSummary testFileSummary, string outFile)
{
    // Guard both arguments before doing any work.
    if (testFileSummary == null)
    {
        throw new ArgumentNullException("testFileSummary");
    }
    if (string.IsNullOrEmpty(outFile))
    {
        throw new ArgumentNullException("outFile");
    }

    // Render the summary and write it out with the transformer's encoding.
    var rendered = Transform(testFileSummary);
    fileSystem.WriteAllText(outFile, rendered, Encoding);
}
public TransformResult ProcessTransforms(IEnumerable<TestContext> testContexts, TestCaseSummary overallSummary)
{
    // Several contexts may share one settings file; run its transforms once
    // and collect every produced output into a single TransformResult.
    var results = new TransformResult();
    var distinctSettings = testContexts.Select(context => context.TestFileSettings).Distinct();

    foreach (var settings in distinctSettings)
    {
        if (settings.Transforms == null || !settings.Transforms.Any())
        {
            continue;
        }
        ProcessTransforms(settings, overallSummary, results);
    }

    return results;
}
public override void TestSuiteFinished(TestCaseSummary testResultsSummary)
{
    base.TestSuiteFinished(testResultsSummary);

    // Only surface coverage when the data collector ran and produced results.
    var coverage = testResultsSummary.CoverageObject;
    if (!runContext.IsDataCollectionEnabled || coverage == null)
    {
        return;
    }

    var directory = runContext.SolutionDirectory;
    var coverageHtmlFile = CoverageOutputGenerator.WriteHtmlFile(directory, coverage);

    // Open the generated coverage report in the user's browser.
    var processHelper = new ProcessHelper();
    processHelper.LaunchFileInBrowser(coverageHtmlFile);
}
public override string Transform(TestCaseSummary testFileSummary)
{
    if (testFileSummary == null)
    {
        throw new ArgumentNullException("testFileSummary");
    }
    if (testFileSummary.CoverageObject == null)
    {
        // No coverage collected means nothing to render.
        return string.Empty;
    }

    // Emit one section per covered file.
    var output = new StringBuilder();
    foreach (var coverageData in testFileSummary.CoverageObject.Values)
    {
        AppendCoverageForFile(output, coverageData);
    }
    return output.ToString();
}
private XmlElement AddTestResultsRoot(TestCaseSummary summary, XmlDocument document)
{
    // Creates and appends the NUnit-style <test-results> root element,
    // populating the aggregate counters from the run summary.
    var testResults = document.CreateElement("test-results");
    testResults.SetAttribute("name", "Chutzpah Test Results");
    testResults.SetAttribute("total", summary.TotalCount.ToString());
    testResults.SetAttribute("errors", summary.Errors.Count.ToString());
    testResults.SetAttribute("failures", summary.FailedCount.ToString());

    // Tests that neither passed, failed nor errored are reported as both
    // not-run and skipped; compute the value once so the two cannot drift.
    var notRun = summary.TotalCount - summary.Errors.Count - summary.FailedCount - summary.PassedCount;
    testResults.SetAttribute("not-run", notRun.ToString());
    testResults.SetAttribute("inconclusive", "0");
    testResults.SetAttribute("ignored", "0");
    testResults.SetAttribute("skipped", notRun.ToString());
    testResults.SetAttribute("invalid", "0");

    // Capture the timestamp once so the date and time attributes cannot
    // straddle midnight and describe two different moments.
    var now = DateTime.Now;
    testResults.SetAttribute("date", now.ToString("yyyy-MM-dd"));
    testResults.SetAttribute("time", now.ToString("HH:mm:ss"));

    document.AppendChild(testResults);
    return testResults;
}
private static TestCaseSummary GetTestCaseSummary()
{
    // Fixture: one file with no line data and one with a mix of covered,
    // uncovered and untracked lines.
    var summary = new TestCaseSummary();
    summary.CoverageObject = new CoverageData();

    summary.CoverageObject["/no/coverage"] = new CoverageFileData
    {
        FilePath = "/no/coverage",
        LineExecutionCounts = null
    };

    summary.CoverageObject["/some/lines"] = new CoverageFileData
    {
        FilePath = "/some/lines",
        LineExecutionCounts = new int?[] { 1, 2, null, 5, 0 }
    };

    return summary;
}
public override void Transform(TestCaseSummary testFileSummary, string outFile)
{
    // Validate arguments first; order matters for the exception parameter name.
    if (testFileSummary == null)
    {
        throw new ArgumentNullException("testFileSummary");
    }
    if (string.IsNullOrEmpty(outFile))
    {
        throw new ArgumentNullException("outFile");
    }

    // Without coverage data there is nothing to write; skip silently.
    var coverage = testFileSummary.CoverageObject;
    if (coverage == null)
    {
        return;
    }

    CoverageOutputGenerator.WriteHtmlFile(outFile, coverage);
}
public override string Transform(TestCaseSummary testFileSummary)
{
    if (testFileSummary == null)
    {
        throw new ArgumentNullException("testFileSummary");
    }

    // Pre-compute the aggregate numbers consumed by the append helpers below.
    GetOverallStats(testFileSummary.CoverageObject);

    // Assemble the emma-style coverage report document.
    var report = new StringBuilder();
    report.AppendLine(@"<?xml version=""1.0"" encoding=""UTF-8"" ?>");
    report.AppendLine(@"<report>");
    AppendOverallStats(report, testFileSummary.CoverageObject);
    report.AppendLine(@" <data>");
    report.AppendLine(@" <all name=""all classes"">");
    AppendOverallCoverage(report);
    AppendCoverageBySourceFile(report, testFileSummary.CoverageObject);
    report.AppendLine(@" </all>");
    report.AppendLine(@" </data>");
    report.AppendLine(@"</report>");
    return report.ToString();
}
public override string Transform(TestCaseSummary testFileSummary)
{
    // Renders the run summary as JUnit-style XML: one <testsuite> per test
    // file and one <testcase> per test, with a <failure> element carrying the
    // first failed assertion of each failing test.
    if (testFileSummary == null)
        throw new ArgumentNullException("testFileSummary");

    var builder = new StringBuilder();
    builder.AppendLine(@"<?xml version=""1.0"" encoding=""UTF-8"" ?>");
    builder.AppendLine(@"<testsuites>");
    foreach (TestFileSummary file in testFileSummary.TestFileSummaries)
    {
        // (Removed a stray empty statement ';' that followed this call.)
        builder.AppendLine(
            string.Format(@" <testsuite name=""{0}"" tests=""{1}"" failures=""{2}"" time=""{3}"">",
                Encode(file.Path),
                file.Tests.Count,
                file.Tests.Count(x => !x.ResultsAllPassed),
                ConvertMillisecondsToSeconds(file.TimeTaken)));

        foreach (TestCase test in file.Tests)
        {
            if (test.ResultsAllPassed)
            {
                builder.AppendLine(string.Format(@" <testcase name=""{0}"" time=""{1}"" />",
                    Encode(test.GetDisplayName()), ConvertMillisecondsToSeconds(test.TimeTaken)));
            }
            else
            {
                // Only the first failed result is reported; a failing test
                // with no failed result objects produces no element at all.
                TestResult failureCase = test.TestResults.FirstOrDefault(x => !x.Passed);
                if (failureCase != null)
                {
                    string failureMessage = failureCase.GetFailureMessage();
                    builder.AppendLine(string.Format(@" <testcase name=""{0}"" time=""{1}"">",
                        Encode(test.GetDisplayName()), ConvertMillisecondsToSeconds(test.TimeTaken)));
                    builder.AppendLine(string.Format(@" <failure message=""{0}""></failure>", Encode(failureMessage)));
                    builder.AppendLine(string.Format(@" </testcase>"));
                }
            }
        }

        builder.AppendLine(@" </testsuite>");
    }
    builder.AppendLine(@"</testsuites>");
    return builder.ToString();
}
// After the suite completes, publish the coverage report — written to disk in
// TFS build runs, opened in a browser for local runs with a solution.
public override void TestSuiteFinished(TestCaseSummary testResultsSummary)
{
    base.TestSuiteFinished(testResultsSummary);
    if(!runContext.IsDataCollectionEnabled || testResultsSummary.CoverageObject == null)
    {
        return;
    }

    try
    {
        // If we do not have a solutiondirectory, we assume that we are running in tfs build
        // In that case we only write to the testrundirectory and do not open a browser
        if(string.IsNullOrEmpty(runContext.SolutionDirectory))
        {
            ChutzpahTracer.TraceInformation("Chutzpah runs in TFSBuild, writing coverage file to {0}", runContext.TestRunDirectory);
            var directory = runContext.TestRunDirectory;
            CoverageOutputGenerator.WriteHtmlFile(directory, testResultsSummary.CoverageObject);
            CoverageOutputGenerator.WriteJsonFile(directory, testResultsSummary.CoverageObject);
        }
        else
        {
            ChutzpahTracer.TraceInformation("Chutzpah runs not in TFSBuild opening coverage file in browser");
            var directory = runContext.SolutionDirectory;
            var coverageHtmlFile = CoverageOutputGenerator.WriteHtmlFile(directory, testResultsSummary.CoverageObject);
            var processHelper = new ProcessHelper();
            processHelper.LaunchFileInBrowser(coverageHtmlFile);
        }
    }
    catch(Exception e)
    {
        // A coverage output failure must not fail the test run; report it
        // through the test framework channel instead.
        frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format("Error while writing coverage output: {0}", e));
    }
}
// Top-level run pipeline: discover test files, group them into contexts,
// execute each context (possibly in parallel), then aggregate the per-file
// summaries into one TestCaseSummary for the whole run.
private TestCaseSummary ProcessTestPaths(IEnumerable<string> testPaths, TestOptions options, TestExecutionMode testExecutionMode, ITestMethodRunnerCallback callback)
{
    options.TestExecutionMode = testExecutionMode;

    stopWatch.Start();
    string headlessBrowserPath = fileProbe.FindFilePath(HeadlessBrowserName);
    if (testPaths == null)
        throw new ArgumentNullException("testPaths");
    if (headlessBrowserPath == null)
        throw new FileNotFoundException("Unable to find headless browser: " + HeadlessBrowserName);
    if (fileProbe.FindFilePath(TestRunnerJsName) == null)
        throw new FileNotFoundException("Unable to find test runner base js file: " + TestRunnerJsName);

    var overallSummary = new TestCaseSummary();

    // Concurrent list to collect test contexts
    var testContexts = new ConcurrentBag<TestContext>();

    // Concurrent collection used to gather the parallel results from
    var testFileSummaries = new ConcurrentQueue<TestFileSummary>();
    var resultCount = 0;
    var cancellationSource = new CancellationTokenSource();

    // Given the input paths discover the potential test files
    var scriptPaths = FindTestFiles(testPaths, options);

    // Group the test files by their chutzpah.json files. Then check if those settings file have batching mode enabled.
    // If so, we keep those tests in a group together to be used in one context
    // Otherwise, we put each file in its own test group so each get their own context
    var testRunConfiguration = BuildTestRunConfiguration(scriptPaths, options);

    ConfigureTracing(testRunConfiguration);

    // The settings file may clamp the parallelism requested on the options.
    var parallelism = testRunConfiguration.MaxDegreeOfParallelism.HasValue
        ? Math.Min(options.MaxDegreeOfParallelism, testRunConfiguration.MaxDegreeOfParallelism.Value)
        : options.MaxDegreeOfParallelism;

    var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = parallelism, CancellationToken = cancellationSource.Token };

    ChutzpahTracer.TraceInformation("Chutzpah run started in mode {0} with parallelism set to {1}", testExecutionMode, parallelOptions.MaxDegreeOfParallelism);

    // Build test contexts in parallel given a list of files each
    BuildTestContexts(options, testRunConfiguration.TestGroups, parallelOptions, cancellationSource, resultCount, testContexts, callback, overallSummary);

    // Compile the test contexts
    if (!PerformBatchCompile(callback, testContexts))
    {
        return overallSummary;
    }

    // Build test harness for each context and execute it in parallel
    ExecuteTestContexts(options, testExecutionMode, callback, testContexts, parallelOptions, headlessBrowserPath, testFileSummaries, overallSummary);

    // Gather the TestFileSummaries into the overall TestCaseSummary
    foreach (var fileSummary in testFileSummaries)
    {
        overallSummary.Append(fileSummary);
    }
    stopWatch.Stop();
    overallSummary.SetTotalRunTime((int)stopWatch.Elapsed.TotalMilliseconds);

    // Run any configured summary transforms (XML reports, coverage files, ...)
    overallSummary.TransformResult = transformProcessor.ProcessTransforms(testContexts, overallSummary);

    // Clear the settings file cache since in VS Chutzpah is not unloaded from memory.
    // If we don't clear then the user can never update the file.
    testSettingsService.ClearCache();

    ChutzpahTracer.TraceInformation(
        "Chutzpah run finished with {0} passed, {1} failed and {2} errors",
        overallSummary.PassedCount,
        overallSummary.FailedCount,
        overallSummary.Errors.Count);

    return overallSummary;
}
// Serializes the run summary into a Visual Studio TRX (TestRunType) document.
public override string Transform(TestCaseSummary testFileSummary)
{
    if(testFileSummary == null) throw new ArgumentNullException("testFileSummary");

    testRun = new TestRunType
    {
        id = Guid.NewGuid().ToString(),
        name = "Chutzpah_JS_UnitTest_" + DateTime.Now.ToString("yy-MMM-dd hh:mm:ss zz")
    };

    // Record the current Windows user as the run owner when available.
    var windowsIdentity = System.Security.Principal.WindowsIdentity.GetCurrent();
    if (windowsIdentity != null) testRun.runUser = windowsIdentity.Name;

    // Pre-create every top-level TRX section; they are populated below.
    testRun.Items = new object[]
    {
        new TestRunTypeResultSummary(),
        new ResultsType(),
        new TestDefinitionType(),
        new TestEntriesType1(),
        new TestRunTypeTestLists(),
        new TestRunTypeTimes(),
        new TestSettingsType
        {
            name = "Default",
            id = Guid.NewGuid().ToString(),
            Execution = new TestSettingsTypeExecution
            {
                TestTypeSpecific = new TestSettingsTypeExecutionTestTypeSpecific{}
            }
        }
    };

    // Time taken is current time
    // NOTE(review): TimeTaken is fed to AddSeconds here, but other transforms in
    // this codebase divide TimeTaken by 1000 to get seconds — confirm whether
    // TimeTaken is milliseconds or seconds.
    testRun.Items.GetInstance<TestRunTypeTimes>(VSTSExtensions.TestRunItemType.Times).creation = DateTime.Now.AddSeconds(-testFileSummary.TimeTaken).ToString("O");
    testRun.Items.GetInstance<TestRunTypeTimes>(VSTSExtensions.TestRunItemType.Times).start = DateTime.Now.AddSeconds(-testFileSummary.TimeTaken).ToString("O");
    testRun.Items.GetInstance<TestRunTypeTimes>(VSTSExtensions.TestRunItemType.Times).queuing = DateTime.Now.AddSeconds(-testFileSummary.TimeTaken).ToString("O");

    var testList = new TestListType
    {
        name = "Results Not in a List",
        id = Guid.NewGuid().ToString()
    };
    var testTypeId = Guid.NewGuid();

    // Project each Chutzpah test into a VSTS test-case wrapper.
    var currentTestCases = testFileSummary.Tests.Select(x => new VSTSTestCase().UpdateWith(x));
    var testsHelper = currentTestCases.ToList();
    testRun.Items.GetInstance<TestRunTypeTimes>(VSTSExtensions.TestRunItemType.Times).finish = DateTime.Now.ToString("O");

    // Overall outcome is Passed only when every individual test passed.
    testRun.Items.GetInstance<TestRunTypeResultSummary>(VSTSExtensions.TestRunItemType.ResultSummary).outcome = testsHelper.Count(x => x.Passed) == testsHelper.Count() ? "Passed" : "Failed";

    var counter = new CountersType
    {
        aborted = 0,
        completed = 0,
        disconnected = 0,
        error = 0,
        passed = testsHelper.Count(x => x.Passed),
        executed = testsHelper.Count,
        failed = testsHelper.Count(x => !x.Passed),
        total = testsHelper.Count,
        inProgress = 0,
        pending = 0,
        warning = 0,
        notExecuted = 0,
        notRunnable = 0,
        passedButRunAborted = 0,
        inconclusive = 0,
        timeout = 0
    };

    // total attribute is not written if totalSpecified is false
    counter.totalSpecified = true;
    testRun.Items.GetInstance<TestRunTypeResultSummary>(VSTSExtensions.TestRunItemType.ResultSummary).Items = new object[] { counter };

    // One UnitTestType definition per test case.
    testRun.Items.GetInstance<TestDefinitionType>(VSTSExtensions.TestRunItemType.TestDefinition).Items = testsHelper
        .Select(
            (testCase) => new UnitTestType
            {
                id = testCase.Id.ToString(),
                name = testCase.TestName,
                storage = testCase.InputTestFile,
                Items = new[]
                {
                    new BaseTestTypeExecution
                    {
                        id= testCase.ExecutionId.ToString()
                    }
                },
                TestMethod = new UnitTestTypeTestMethod
                {
                    adapterTypeName = "Microsoft.VisualStudio.TestTools.TestTypes.Unit.UnitTestAdapter",
                    className = Path.GetFileNameWithoutExtension(testCase.InputTestFile),
                    codeBase = testCase.InputTestFile,
                    name = testCase.TestName
                }
            }).ToArray();

    testRun.Items.GetInstance<TestDefinitionType>(VSTSExtensions.TestRunItemType.TestDefinition).ItemsElementName = testsHelper
        .Select(
            (testCase) => ItemsChoiceType4.UnitTest).ToArray();

    testRun.Items.GetInstance<TestRunTypeTestLists>(VSTSExtensions.TestRunItemType.TestLists).TestList = new[]
    {
        testList,
        // This has to be hard-coded.
        new TestListType
        {
            name = "All Loaded Results",
            id = "19431567-8539-422a-85d7-44ee4e166bda"
        }
    };

    // Entries link each test definition to its execution and its list.
    testRun.Items.GetInstance<TestEntriesType1>(VSTSExtensions.TestRunItemType.TestEntries).TestEntry = testsHelper.Select(testCase => new TestEntryType
    {
        testId = testCase.Id.ToString(),
        executionId = testCase.ExecutionId.ToString(),
        testListId = testList.id
    }).ToArray();

    // Build the per-test result entries, attaching failure details when needed.
    testRun.Items.GetInstance<ResultsType>(VSTSExtensions.TestRunItemType.Results).Items = testsHelper.Select((testCase) =>
    {
        var unitTestResultType = new UnitTestResultType
        {
            executionId = testCase.ExecutionId.ToString(),
            testId = testCase.Id.ToString(),
            testName = testCase.TestName,
            computerName = Environment.MachineName,
            // NOTE(review): TimeTaken is used here as whole seconds
            // (TimeSpan(h, m, s)) — confirm units match the rest of the code.
            duration = new TimeSpan(0,0,testCase.TimeTaken).ToString("c"),
            // I tried adding this to StandardConsoleRunner, but it demanded too many changes.
            // Setting start to the creation date.
            startTime = DateTime.Now.AddSeconds(-testFileSummary.TimeTaken).ToString("O"),
            // Setting end time to creation date + time taken to run this test.
            endTime = DateTime.Now.AddSeconds((-testFileSummary.TimeTaken) + testCase.TimeTaken).ToString("O"),
            // This is for specific test type.
            testType = "13cdc9d9-ddb5-4fa4-a97d-d965ccfc6d4b",
            outcome = testCase.Passed ? "Passed" : "Failed",
            testListId = testList.id,
        };

        if (!testCase.Passed)
        {
            unitTestResultType.Items = new[]
            {
                new OutputType()
                {
                    ErrorInfo = new OutputTypeErrorInfo{Message = testCase.exception !=null ? testCase.exception.ToString(): string.Join(",",testCase.TestResults.Where(x=>!x.Passed).Select(x=>x.Message))}
                }
            };
        }

        return unitTestResultType;
    }).ToArray();

    testRun.Items.GetInstance<ResultsType>(VSTSExtensions.TestRunItemType.Results).ItemsElementName = testsHelper.Select(testCase => ItemsChoiceType3.UnitTestResult).ToArray();

    // Serialize the assembled TestRunType into XML text.
    var stringStream = new StringWriter();
    var xs = new XmlSerializer(typeof(TestRunType));
    xs.Serialize(stringStream,testRun);
    return stringStream.ToString();
}
public override void TestSuiteFinished(TestCaseSummary summary)
{
    base.TestSuiteFinished(summary);
    // Announce suite completion using the TeamCity service-message format.
    WriteLine("testSuiteFinished name='{0}'", Escape(testSuiteName));
}
private static void ProcessTestSummaryTransformers(CommandLine commandLine, TestCaseSummary testResultsSummary)
{
    // Any unmatched command-line argument whose name matches a known
    // transformer triggers that transformer; the argument value is the
    // output path.
    var transformers = new SummaryTransformerProvider().GetTransformers(new FileSystemWrapper());
    foreach (var transformer in transformers)
    {
        if (!commandLine.UnmatchedArguments.ContainsKey(transformer.Name))
        {
            continue;
        }

        var outputPath = commandLine.UnmatchedArguments[transformer.Name];
        transformer.Transform(testResultsSummary, outputPath);
    }
}
// No-op by default; derived callbacks override this to react when the
// entire test suite has finished running.
public virtual void TestSuiteFinished(TestCaseSummary testResultsSummary)
{
}
private void BuildTestContexts(
    TestOptions options,
    List<List<PathInfo>> scriptPathGroups,
    ParallelOptions parallelOptions,
    CancellationTokenSource cancellationSource,
    int resultCount,
    ConcurrentBag<TestContext> testContexts,
    ITestMethodRunnerCallback callback,
    TestCaseSummary overallSummary)
{
    // Builds a TestContext per group of test files, in parallel. A failure
    // for one group is reported via the callback and recorded on the overall
    // summary without aborting the other groups.
    Parallel.ForEach(scriptPathGroups, parallelOptions, testFiles =>
    {
        var pathString = string.Join(",", testFiles.Select(x => x.FullPath));
        ChutzpahTracer.TraceInformation("Building test context for {0}", pathString);

        try
        {
            if (cancellationSource.IsCancellationRequested) return;
            TestContext testContext;

            // resultCount is captured by every parallel iteration, so the
            // increment must be atomic; a plain resultCount++ raced and could
            // both lose counts and miss the file-search limit below.
            var currentCount = Interlocked.Increment(ref resultCount);

            if (testContextBuilder.TryBuildContext(testFiles, options, out testContext))
            {
                testContexts.Add(testContext);
            }
            else
            {
                ChutzpahTracer.TraceWarning("Unable to build test context for {0}", pathString);
            }

            // Limit the number of files we can scan to attempt to build a context for
            // This is important in the case of folder scanning where many JS files may not be
            // test files.
            if (currentCount >= options.FileSearchLimit)
            {
                ChutzpahTracer.TraceError("File search limit hit!!!");
                cancellationSource.Cancel();
            }
        }
        catch (Exception e)
        {
            var error = new TestError
            {
                InputTestFile = testFiles.Select(x => x.FullPath).FirstOrDefault(),
                Message = e.ToString()
            };

            overallSummary.Errors.Add(error);
            callback.FileError(error);

            ChutzpahTracer.TraceError(e, "Error during building test context for {0}", pathString);
        }
        finally
        {
            ChutzpahTracer.TraceInformation("Finished building test context for {0}", pathString);
        }
    });
}
public override void TestSuiteFinished(TestCaseSummary summary)
{
    base.TestSuiteFinished(summary);
    // Emit the TeamCity service message that closes the Chutzpah suite.
    Console.WriteLine("##teamcity[testSuiteFinished name='{0}']", Escape(ChutzpahJavascriptTestSuiteName));
}
// The two-argument Transform(TestCaseSummary, string) overload is overridden
// by this class, so this string-producing variant is intentionally unsupported.
public override string Transform(TestCaseSummary testFileSummary)
{
    throw new NotImplementedException();
}
// Runs every built test context in parallel, dispatching on the configured
// launch mode (full browser / headless / custom), then cleans up the
// generated harnesses once all runs complete.
private void ExecuteTestContexts(
    TestOptions options,
    TestExecutionMode testExecutionMode,
    ITestMethodRunnerCallback callback,
    ConcurrentBag<TestContext> testContexts,
    ParallelOptions parallelOptions,
    string headlessBrowserPath,
    ConcurrentQueue<TestFileSummary> testFileSummaries,
    TestCaseSummary overallSummary)
{
    Parallel.ForEach(
        testContexts,
        parallelOptions,
        testContext =>
        {
            ChutzpahTracer.TraceInformation("Start test run for {0} in {1} mode", testContext.FirstInputTestFile, testExecutionMode);

            try
            {
                testHarnessBuilder.CreateTestHarness(testContext, options);

                if (options.TestLaunchMode == TestLaunchMode.FullBrowser)
                {
                    ChutzpahTracer.TraceInformation(
                        "Launching test harness '{0}' for file '{1}' in a browser",
                        testContext.TestHarnessPath,
                        testContext.FirstInputTestFile);

                    // Allow override from command line.
                    var browserArgs = testContext.TestFileSettings.BrowserArguments;
                    if (!string.IsNullOrWhiteSpace(options.BrowserArgs))
                    {
                        var path = BrowserPathHelper.GetBrowserPath(options.BrowserName);
                        browserArgs = new Dictionary<string, string>
                        {
                            { Path.GetFileNameWithoutExtension(path), options.BrowserArgs }
                        };
                    }

                    process.LaunchFileInBrowser(testContext.TestHarnessPath, options.BrowserName, browserArgs);
                }
                else if (options.TestLaunchMode == TestLaunchMode.HeadlessBrowser)
                {
                    ChutzpahTracer.TraceInformation(
                        "Invoking headless browser on test harness '{0}' for file '{1}'",
                        testContext.TestHarnessPath,
                        testContext.FirstInputTestFile);

                    // One invocation can yield several per-file summaries.
                    var testSummaries = InvokeTestRunner(
                        headlessBrowserPath,
                        options,
                        testContext,
                        testExecutionMode,
                        callback);

                    foreach (var testSummary in testSummaries)
                    {
                        ChutzpahTracer.TraceInformation(
                            "Test harness '{0}' for file '{1}' finished with {2} passed, {3} failed and {4} errors",
                            testContext.TestHarnessPath,
                            testSummary.Path,
                            testSummary.PassedCount,
                            testSummary.FailedCount,
                            testSummary.Errors.Count);

                        ChutzpahTracer.TraceInformation(
                            "Finished running headless browser on test harness '{0}' for file '{1}'",
                            testContext.TestHarnessPath,
                            testSummary.Path);

                        testFileSummaries.Enqueue(testSummary);
                    }
                }
                else if (options.TestLaunchMode == TestLaunchMode.Custom)
                {
                    if (options.CustomTestLauncher == null)
                    {
                        throw new ArgumentNullException("TestOptions.CustomTestLauncher");
                    }

                    ChutzpahTracer.TraceInformation(
                        "Launching custom test on test harness '{0}' for file '{1}'",
                        testContext.TestHarnessPath,
                        testContext.FirstInputTestFile);

                    options.CustomTestLauncher.LaunchTest(testContext);
                }
                else
                {
                    // Unknown launch mode: fail fast in debug builds.
                    Debug.Assert(false);
                }
            }
            catch (Exception e)
            {
                // Record the failure against the first input file and keep the
                // run going for the remaining contexts.
                var error = new TestError
                {
                    InputTestFile = testContext.InputTestFiles.FirstOrDefault(),
                    Message = e.ToString()
                };

                overallSummary.Errors.Add(error);
                callback.FileError(error);

                ChutzpahTracer.TraceError(e, "Error during test execution of {0}", testContext.FirstInputTestFile);
            }
            finally
            {
                ChutzpahTracer.TraceInformation("Finished test run for {0} in {1} mode", testContext.FirstInputTestFile, testExecutionMode);
            }
        });

    // Clean up test context
    foreach (var testContext in testContexts)
    {
        // Don't clean up context if in debug mode, or when the harness may
        // still be in use by a browser/custom launcher after this returns.
        if (!m_debugEnabled
            && options.TestLaunchMode != TestLaunchMode.FullBrowser
            && options.TestLaunchMode != TestLaunchMode.Custom)
        {
            try
            {
                ChutzpahTracer.TraceInformation("Cleaning up test context for {0}", testContext.FirstInputTestFile);
                testContextBuilder.CleanupContext(testContext);
            }
            catch (Exception e)
            {
                ChutzpahTracer.TraceError(e, "Error cleaning up test context for {0}", testContext.FirstInputTestFile);
            }
        }
    }
}