private static void HandleTestProcessExitCode(int exitCode, string inputTestFile, IList<TestError> errors, ITestMethodRunnerCallback callback)
{
    string errorMessage = null;

    switch ((TestProcessExitCode)exitCode)
    {
        case TestProcessExitCode.AllPassed:
        case TestProcessExitCode.SomeFailed:
            return;
        case TestProcessExitCode.Timeout:
            errorMessage = "Timeout occurred when executing test file";
            break;
        default:
            errorMessage = "Unknown error occurred when executing test file. Received exit code of " + exitCode;
            break;
    }

    if (!string.IsNullOrEmpty(errorMessage))
    {
        var error = new TestError
        {
            InputTestFile = inputTestFile,
            Message = errorMessage
        };

        errors.Add(error);
        callback.FileError(error);
        ChutzpahTracer.TraceError("Headless browser returned with an error: {0}", errorMessage);
    }
}
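For context, a minimal sketch of what the TestProcessExitCode enum consumed above presumably looks like. The member names come straight from the switch statement; the numeric values are assumptions for illustration only and may differ in the real code.

// Hypothetical sketch only: member names are taken from the switch above,
// the underlying numeric values are assumed.
public enum TestProcessExitCode
{
    AllPassed = 0,   // every test in the file passed
    SomeFailed = 1,  // the run completed but at least one test failed
    Timeout = 2      // the headless browser did not finish in time
    // Any other exit code falls through to the "Unknown error" branch above.
}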
private static void PlayDeferredEvents(StreamingTestFileContext currentTestFileContext, IList<Action<StreamingTestFileContext>> deferredEvents)
{
    try
    {
        if (currentTestFileContext == null)
        {
            return;
        }

        // Since we found a unique match we need to replay and log the events that came before this
        // using this file context
        // We lock here since in the event of a timeout this may be run from the timeout handler while the phantom
        // process is still running
        lock (deferredEvents)
        {
            foreach (var deferredEvent in deferredEvents)
            {
                deferredEvent(currentTestFileContext);
            }
        }
    }
    catch (Exception e)
    {
        ChutzpahTracer.TraceError(e, "Unable to play deferred events");
    }
}
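A self-contained sketch of the defer-then-replay pattern used here, with a hypothetical FileContext type standing in for Chutzpah's StreamingTestFileContext: output that arrives before the owning test file is known is queued as delegates and flushed once a context is resolved.

// Illustrative only: "FileContext" is a stand-in, not a real Chutzpah type.
using System;
using System.Collections.Generic;

class FileContext { public string Path; }

class DeferredEventDemo
{
    static void Main()
    {
        var deferred = new List<Action<FileContext>>();

        // Output arrives before any file context is resolved, so queue it as a delegate.
        deferred.Add(ctx => Console.WriteLine("[{0}] error: module failed to load", ctx.Path));
        deferred.Add(ctx => Console.WriteLine("[{0}] log: setup complete", ctx.Path));

        // Later a unique file context is identified (or, on timeout, the first one is picked).
        var current = new FileContext { Path = "tests/math.tests.js" };

        // Replay everything against the resolved context, as PlayDeferredEvents does above.
        lock (deferred)
        {
            foreach (var evt in deferred)
            {
                evt(current);
            }
        }
    }
}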
private void BuildTestContexts(
    TestOptions options,
    IEnumerable<PathInfo> scriptPaths,
    ParallelOptions parallelOptions,
    CancellationTokenSource cancellationSource,
    int resultCount,
    ConcurrentBag<TestContext> testContexts,
    ITestMethodRunnerCallback callback,
    TestCaseSummary overallSummary)
{
    Parallel.ForEach(scriptPaths, parallelOptions, testFile =>
    {
        ChutzpahTracer.TraceInformation("Building test context for {0}", testFile.FullPath);

        try
        {
            if (cancellationSource.IsCancellationRequested)
            {
                return;
            }

            TestContext testContext;
            resultCount++;
            if (testContextBuilder.TryBuildContext(testFile, options, out testContext))
            {
                testContexts.Add(testContext);
            }
            else
            {
                ChutzpahTracer.TraceWarning("Unable to build test context for {0}", testFile.FullPath);
            }

            // Limit the number of files we can scan to attempt to build a context for
            // This is important in the case of folder scanning where many JS files may not be
            // test files.
            if (resultCount >= options.FileSearchLimit)
            {
                ChutzpahTracer.TraceError("File search limit hit!!!");
                cancellationSource.Cancel();
            }
        }
        catch (Exception e)
        {
            var error = new TestError
            {
                InputTestFile = testFile.FullPath,
                Message = e.ToString()
            };

            overallSummary.Errors.Add(error);
            callback.FileError(error);

            ChutzpahTracer.TraceError(e, "Error during building test context for {0}", testFile.FullPath);
        }
        finally
        {
            ChutzpahTracer.TraceInformation("Finished building test context for {0}", testFile.FullPath);
        }
    });
}
private string GetTestHarnessTemplatePath(IFrameworkDefinition definition, ChutzpahTestSettingsFile chutzpahTestSettings)
{
    string templatePath = null;

    if (!string.IsNullOrEmpty(chutzpahTestSettings.CustomTestHarnessPath))
    {
        // If CustomTestHarnessPath is an absolute path then Path.Combine just returns it
        var harnessPath = Path.Combine(chutzpahTestSettings.SettingsFileDirectory, chutzpahTestSettings.CustomTestHarnessPath);
        var fullPath = fileProbe.FindFilePath(harnessPath);
        if (fullPath != null)
        {
            ChutzpahTracer.TraceInformation("Using Custom Test Harness from {0}", fullPath);
            templatePath = fullPath;
        }
        else
        {
            ChutzpahTracer.TraceError("Cannot find Custom Test Harness at {0}", chutzpahTestSettings.CustomTestHarnessPath);
        }
    }

    if (templatePath == null)
    {
        templatePath = fileProbe.GetPathInfo(Path.Combine(Constants.TestFileFolder, definition.GetTestHarness(chutzpahTestSettings))).FullPath;
        ChutzpahTracer.TraceInformation("Using builtin Test Harness from {0}", templatePath);
    }

    return templatePath;
}
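The comment above leans on a standard .NET behavior of Path.Combine: when the second argument is rooted, the first argument is discarded. A small stand-alone illustration (the paths are made up for the example):

using System;
using System.IO;

class PathCombineDemo
{
    static void Main()
    {
        var settingsDir = @"C:\project\tests";           // hypothetical settings file directory
        var relativeHarness = @"harness\custom.html";    // a relative CustomTestHarnessPath
        var absoluteHarness = @"C:\shared\custom.html";  // an absolute CustomTestHarnessPath

        // A relative path is appended to the settings directory...
        Console.WriteLine(Path.Combine(settingsDir, relativeHarness)); // C:\project\tests\harness\custom.html

        // ...but a rooted path wins outright, which is what the comment above relies on.
        Console.WriteLine(Path.Combine(settingsDir, absoluteHarness)); // C:\shared\custom.html
    }
}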
private void FireErrorOutput(TestContext testContext, ITestMethodRunnerCallback callback, StreamingTestFileContext testFileContext, JsRunnerOutput jsRunnerOutput)
{
    var error = jsRunnerOutput as JsError;

    error.Error.InputTestFile = testFileContext.ReferencedFile.Path;
    error.Error.PathFromTestSettingsDirectory = testFileContext.ReferencedFile.PathFromTestSettingsDirectory;
    callback.FileError(testContext, error.Error);
    testFileContext.TestFileSummary.Errors.Add(error.Error);

    if (testFileContext.TestContext.TestFileSettings.CreateFailedTestForFileError.GetValueOrDefault())
    {
        var fileErrorTest = new TestCase();
        fileErrorTest.InputTestFile = testFileContext.ReferencedFile.Path;
        fileErrorTest.PathFromTestSettingsDirectory = testFileContext.ReferencedFile.PathFromTestSettingsDirectory;
        fileErrorTest.TestName = string.Format("!! File Error #{0} - Error encountered outside of test case execution !!", testFileContext.TestFileSummary.Errors.Count);
        fileErrorTest.TestResults.Add(new TestResult
        {
            Passed = false,
            StackTrace = error.Error.StackAsString ?? error.Error.FormatStackObject(),
            Message = error.Error.Message
        });

        callback.TestStarted(testContext, fileErrorTest);
        callback.TestFinished(testContext, fileErrorTest);
        testFileContext.TestFileSummary.AddTestCase(fileErrorTest);
    }

    ChutzpahTracer.TraceError("Error received from Phantom {0}", error.Error.Message);
}
public IList<TestFileSummary> Read(ProcessStream processStream, TestOptions testOptions, TestContext testContext, ITestMethodRunnerCallback callback, bool debugEnabled)
{
    if (processStream == null)
    {
        throw new ArgumentNullException("processStream");
    }
    if (testOptions == null)
    {
        throw new ArgumentNullException("testOptions");
    }
    if (testContext == null)
    {
        throw new ArgumentNullException("testContext");
    }

    lastTestEvent = DateTime.Now;
    var timeout = (testContext.TestFileSettings.TestFileTimeout ?? testOptions.TestFileTimeoutMilliseconds) + 500; // Add buffer to timeout to account for serialization

    var codeCoverageEnabled = (!testContext.TestFileSettings.EnableCodeCoverage.HasValue && testOptions.CoverageOptions.Enabled)
                              || (testContext.TestFileSettings.EnableCodeCoverage.HasValue && testContext.TestFileSettings.EnableCodeCoverage.Value);

    var streamingTestFileContexts = testContext.ReferencedFiles
        .Where(x => x.IsFileUnderTest)
        .Select(x => new StreamingTestFileContext(x, testContext, codeCoverageEnabled))
        .ToList();

    var deferredEvents = new List<Action<StreamingTestFileContext>>();

    var readerTask = Task<IList<TestFileSummary>>.Factory.StartNew(() => ReadFromStream(processStream.StreamReader, testContext, testOptions, streamingTestFileContexts, deferredEvents, callback, debugEnabled));
    while (readerTask.Status == TaskStatus.WaitingToRun
           || (readerTask.Status == TaskStatus.Running && (DateTime.Now - lastTestEvent).TotalMilliseconds < timeout))
    {
        Thread.Sleep(100);
    }

    if (readerTask.IsCompleted)
    {
        ChutzpahTracer.TraceInformation("Finished reading stream from test file '{0}'", testContext.FirstInputTestFile);
        return readerTask.Result;
    }
    else
    {
        // Since we timed out make sure we play the deferred events so we do not lose errors
        // We will just attach these events to the first test context at this point since we do
        // not know where they belong
        PlayDeferredEvents(streamingTestFileContexts.FirstOrDefault(), deferredEvents);

        // We timed out so kill the process and return an empty test file summary
        ChutzpahTracer.TraceError("Test file '{0}' timed out after running for {1} milliseconds", testContext.FirstInputTestFile, (DateTime.Now - lastTestEvent).TotalMilliseconds);

        processStream.TimedOut = true;
        processStream.KillProcess();
        return testContext.ReferencedFiles.Where(x => x.IsFileUnderTest).Select(file => new TestFileSummary(file.Path)).ToList();
    }
}
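A stripped-down sketch of the sliding inactivity timeout used in the polling loop above, with a hypothetical worker in place of the real stream reader: the deadline is measured from the last observed event rather than from the start of the run, so a slow but still-progressing test file is not killed.

using System;
using System.Threading;
using System.Threading.Tasks;

class SlidingTimeoutDemo
{
    // Updated whenever the worker makes progress (a stand-in for lastTestEvent).
    static DateTime lastEvent = DateTime.Now;

    static void Main()
    {
        var timeoutMs = 5000 + 500; // configured timeout plus a small serialization buffer

        var worker = Task.Run(() =>
        {
            for (var i = 0; i < 10; i++)
            {
                Thread.Sleep(300);        // simulate reading one line of test output
                lastEvent = DateTime.Now; // progress resets the inactivity window
            }
        });

        // Poll until the worker finishes or goes quiet for longer than the timeout.
        while (!worker.IsCompleted && (DateTime.Now - lastEvent).TotalMilliseconds < timeoutMs)
        {
            Thread.Sleep(100);
        }

        Console.WriteLine(worker.IsCompleted ? "finished" : "timed out; would kill the process here");
    }
}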
private void FireErrorOutput(ITestMethodRunnerCallback callback, TestFileContext testFileContext, JsRunnerOutput jsRunnerOutput)
{
    var error = jsRunnerOutput as JsError;

    error.Error.InputTestFile = testFileContext.ReferencedFile.Path;
    callback.FileError(error.Error);
    testFileContext.TestFileSummary.Errors.Add(error.Error);

    ChutzpahTracer.TraceError("Error received from Phantom {0}", error.Error.Message);
}
private bool PerformBatchCompile(ITestMethodRunnerCallback callback, IEnumerable<TestContext> testContexts)
{
    try
    {
        batchCompilerService.Compile(testContexts);
    }
    catch (ChutzpahCompilationFailedException e)
    {
        callback.ExceptionThrown(e, e.SettingsFile);
        ChutzpahTracer.TraceError(e, "Error during batch compile from {0}", e.SettingsFile);
        return false;
    }

    return true;
}
private IList<ReferencedFile> ExpandNestedReferences(
    HashSet<string> discoveredPaths,
    IFrameworkDefinition definition,
    string currentFilePath,
    ChutzpahTestSettingsFile chutzpahTestSettings)
{
    try
    {
        string textToParse = fileSystem.GetText(currentFilePath);
        return GetReferencedFiles(discoveredPaths, definition, textToParse, currentFilePath, chutzpahTestSettings);
    }
    catch (IOException e)
    {
        // Unable to get file text
        ChutzpahTracer.TraceError(e, "Unable to get file text from test reference with path {0}", currentFilePath);
    }

    return new List<ReferencedFile>();
}
public IList<TestFileSummary> Read(ProcessStream processStream, TestOptions testOptions, TestContext testContext, ITestMethodRunnerCallback callback, bool debugEnabled)
{
    if (processStream == null)
    {
        throw new ArgumentNullException("processStream");
    }
    if (testOptions == null)
    {
        throw new ArgumentNullException("testOptions");
    }
    if (testContext == null)
    {
        throw new ArgumentNullException("testContext");
    }

    lastTestEvent = DateTime.Now;
    var timeout = (testContext.TestFileSettings.TestFileTimeout ?? testOptions.TestFileTimeoutMilliseconds) + 500; // Add buffer to timeout to account for serialization

    var readerTask = Task<IList<TestFileSummary>>.Factory.StartNew(() => ReadFromStream(processStream.StreamReader, testContext, testOptions, callback, debugEnabled));
    while (readerTask.Status == TaskStatus.WaitingToRun
           || (readerTask.Status == TaskStatus.Running && (DateTime.Now - lastTestEvent).TotalMilliseconds < timeout))
    {
        Thread.Sleep(100);
    }

    if (readerTask.IsCompleted)
    {
        ChutzpahTracer.TraceInformation("Finished reading stream from test file '{0}'", testContext.FirstInputTestFile);
        return readerTask.Result;
    }
    else
    {
        // We timed out so kill the process and return an empty test file summary
        ChutzpahTracer.TraceError("Test file '{0}' timed out after running for {1} milliseconds", testContext.FirstInputTestFile, (DateTime.Now - lastTestEvent).TotalMilliseconds);

        processStream.TimedOut = true;
        processStream.KillProcess();
        return testContext.ReferencedFiles.Where(x => x.IsFileUnderTest).Select(file => new TestFileSummary(file.Path)).ToList();
    }
}
private void ExecuteTestContexts(
    TestOptions options,
    TestExecutionMode testExecutionMode,
    ITestMethodRunnerCallback callback,
    ConcurrentBag<TestContext> testContexts,
    ParallelOptions parallelOptions,
    string headlessBrowserPath,
    ConcurrentQueue<TestFileSummary> testFileSummaries,
    TestCaseSummary overallSummary)
{
    Parallel.ForEach(
        testContexts,
        parallelOptions,
        testContext =>
        {
            ChutzpahTracer.TraceInformation("Start test run for {0} in {1} mode", testContext.InputTestFile, testExecutionMode);

            try
            {
                testHarnessBuilder.CreateTestHarness(testContext, options);

                if (options.OpenInBrowser)
                {
                    ChutzpahTracer.TraceInformation(
                        "Launching test harness '{0}' for file '{1}' in a browser",
                        testContext.TestHarnessPath,
                        testContext.InputTestFile);
                    process.LaunchFileInBrowser(testContext.TestHarnessPath);
                }
                else
                {
                    ChutzpahTracer.TraceInformation(
                        "Invoking headless browser on test harness '{0}' for file '{1}'",
                        testContext.TestHarnessPath,
                        testContext.InputTestFile);

                    var testSummary = InvokeTestRunner(
                        headlessBrowserPath,
                        options,
                        testContext,
                        testExecutionMode,
                        callback);

                    ChutzpahTracer.TraceInformation(
                        "Test harness '{0}' for file '{1}' finished with {2} passed, {3} failed and {4} errors",
                        testContext.TestHarnessPath,
                        testContext.InputTestFile,
                        testSummary.PassedCount,
                        testSummary.FailedCount,
                        testSummary.Errors.Count);

                    ChutzpahTracer.TraceInformation(
                        "Finished running headless browser on test harness '{0}' for file '{1}'",
                        testContext.TestHarnessPath,
                        testContext.InputTestFile);

                    testFileSummaries.Enqueue(testSummary);
                }
            }
            catch (Exception e)
            {
                var error = new TestError
                {
                    InputTestFile = testContext.InputTestFile,
                    Message = e.ToString()
                };

                overallSummary.Errors.Add(error);
                callback.FileError(error);

                ChutzpahTracer.TraceError(e, "Error during test execution of {0}", testContext.InputTestFile);
            }
            finally
            {
                ChutzpahTracer.TraceInformation("Finished test run for {0} in {1} mode", testContext.InputTestFile, testExecutionMode);
            }
        });

    // Clean up test context
    foreach (var testContext in testContexts)
    {
        // Don't clean up context if in debug mode
        if (!m_debugEnabled && !options.OpenInBrowser)
        {
            try
            {
                ChutzpahTracer.TraceInformation("Cleaning up test context for {0}", testContext.InputTestFile);
                testContextBuilder.CleanupContext(testContext);
            }
            catch (Exception e)
            {
                ChutzpahTracer.TraceError(e, "Error cleaning up test context for {0}", testContext.InputTestFile);
            }
        }
    }
}
private void ExecuteTestContexts(
    TestOptions options,
    TestExecutionMode testExecutionMode,
    ITestMethodRunnerCallback callback,
    ConcurrentBag<TestContext> testContexts,
    ParallelOptions parallelOptions,
    string headlessBrowserPath,
    ConcurrentQueue<TestFileSummary> testFileSummaries,
    TestCaseSummary overallSummary)
{
    Parallel.ForEach(
        testContexts,
        parallelOptions,
        testContext =>
        {
            ChutzpahTracer.TraceInformation("Start test run for {0} in {1} mode", testContext.FirstInputTestFile, testExecutionMode);

            try
            {
                try
                {
                    testHarnessBuilder.CreateTestHarness(testContext, options);
                }
                catch (IOException)
                {
                    // Mark this creation as failed so we do not try to clean it up later
                    // This is to work around a bug in TestExplorer that runs chutzpah in parallel on
                    // the same files
                    // TODO(mmanela): Re-evaluate if this is needed once they fix that bug
                    testContext.TestHarnessCreationFailed = true;
                    ChutzpahTracer.TraceWarning("Marking test harness creation failed for harness {0} and test file {1}", testContext.TestHarnessPath, testContext.FirstInputTestFile);
                    throw;
                }

                if (options.TestLaunchMode == TestLaunchMode.FullBrowser)
                {
                    ChutzpahTracer.TraceInformation(
                        "Launching test harness '{0}' for file '{1}' in a browser",
                        testContext.TestHarnessPath,
                        testContext.FirstInputTestFile);

                    // Allow override from command line.
                    var browserArgs = testContext.TestFileSettings.BrowserArguments;
                    if (!string.IsNullOrWhiteSpace(options.BrowserArgs))
                    {
                        var path = BrowserPathHelper.GetBrowserPath(options.BrowserName);
                        browserArgs = new Dictionary<string, string>
                        {
                            { Path.GetFileNameWithoutExtension(path), options.BrowserArgs }
                        };
                    }

                    process.LaunchFileInBrowser(testContext.TestHarnessPath, options.BrowserName, browserArgs);
                }
                else if (options.TestLaunchMode == TestLaunchMode.HeadlessBrowser)
                {
                    ChutzpahTracer.TraceInformation(
                        "Invoking headless browser on test harness '{0}' for file '{1}'",
                        testContext.TestHarnessPath,
                        testContext.FirstInputTestFile);

                    var testSummaries = InvokeTestRunner(
                        headlessBrowserPath,
                        options,
                        testContext,
                        testExecutionMode,
                        callback);

                    foreach (var testSummary in testSummaries)
                    {
                        ChutzpahTracer.TraceInformation(
                            "Test harness '{0}' for file '{1}' finished with {2} passed, {3} failed and {4} errors",
                            testContext.TestHarnessPath,
                            testSummary.Path,
                            testSummary.PassedCount,
                            testSummary.FailedCount,
                            testSummary.Errors.Count);

                        ChutzpahTracer.TraceInformation(
                            "Finished running headless browser on test harness '{0}' for file '{1}'",
                            testContext.TestHarnessPath,
                            testSummary.Path);

                        testFileSummaries.Enqueue(testSummary);
                    }
                }
                else if (options.TestLaunchMode == TestLaunchMode.Custom)
                {
                    if (options.CustomTestLauncher == null)
                    {
                        throw new ArgumentNullException("TestOptions.CustomTestLauncher");
                    }

                    ChutzpahTracer.TraceInformation(
                        "Launching custom test on test harness '{0}' for file '{1}'",
                        testContext.TestHarnessPath,
                        testContext.FirstInputTestFile);
                    options.CustomTestLauncher.LaunchTest(testContext);
                }
                else
                {
                    Debug.Assert(false);
                }
            }
            catch (Exception e)
            {
                var error = new TestError
                {
                    InputTestFile = testContext.InputTestFiles.FirstOrDefault(),
                    Message = e.ToString()
                };

                overallSummary.Errors.Add(error);
                callback.FileError(error);

                ChutzpahTracer.TraceError(e, "Error during test execution of {0}", testContext.FirstInputTestFile);
            }
            finally
            {
                ChutzpahTracer.TraceInformation("Finished test run for {0} in {1} mode", testContext.FirstInputTestFile, testExecutionMode);
            }
        });

    // Clean up test context
    foreach (var testContext in testContexts)
    {
        // Don't clean up the context if we are in debug mode, the harness failed to be created,
        // or the tests were launched in a browser or by a custom launcher
        if (!m_debugEnabled
            && !testContext.TestHarnessCreationFailed
            && options.TestLaunchMode != TestLaunchMode.FullBrowser
            && options.TestLaunchMode != TestLaunchMode.Custom)
        {
            try
            {
                ChutzpahTracer.TraceInformation("Cleaning up test context for {0}", testContext.FirstInputTestFile);
                testContextBuilder.CleanupContext(testContext);
            }
            catch (Exception e)
            {
                ChutzpahTracer.TraceError(e, "Error cleaning up test context for {0}", testContext.FirstInputTestFile);
            }
        }
    }
}
private IList<TestFileSummary> ReadFromStream(StreamReader stream, TestContext testContext, TestOptions testOptions, ITestMethodRunnerCallback callback, bool debugEnabled)
{
    var codeCoverageEnabled = (!testContext.TestFileSettings.EnableCodeCoverage.HasValue && testOptions.CoverageOptions.Enabled)
                              || (testContext.TestFileSettings.EnableCodeCoverage.HasValue && testContext.TestFileSettings.EnableCodeCoverage.Value);

    var testFileContexts = testContext.ReferencedFiles
        .Where(x => x.IsFileUnderTest)
        .Select(x => new TestFileContext(x, testContext, codeCoverageEnabled))
        .ToList();

    var testIndex = 0;

    string line;
    TestFileContext currentTestFileContext = null;

    if (testFileContexts.Count == 1)
    {
        currentTestFileContext = testFileContexts.First();
    }

    var deferredEvents = new List<Action<TestFileContext>>();

    while ((line = stream.ReadLine()) != null)
    {
        if (debugEnabled)
        {
            Console.WriteLine(line);
        }

        var match = prefixRegex.Match(line);
        if (!match.Success)
        {
            continue;
        }

        var type = match.Groups["type"].Value;
        var json = match.Groups["json"].Value;

        // Only update last event timestamp if it is an important event.
        // Log and error could happen even though no test progress is made
        if (!type.Equals("Log") && !type.Equals("Error"))
        {
            lastTestEvent = DateTime.Now;
        }

        try
        {
            switch (type)
            {
                case "FileStart":
                    FireFileStarted(callback, testContext);
                    break;

                case "CoverageObject":
                    var jsCov = jsonSerializer.Deserialize<JsCoverage>(json);

                    if (currentTestFileContext == null)
                    {
                        deferredEvents.Add((fileContext) => FireCoverageObject(callback, fileContext, jsCov));
                    }
                    else
                    {
                        FireCoverageObject(callback, currentTestFileContext, jsCov);
                    }
                    break;

                case "FileDone":
                    var jsFileDone = jsonSerializer.Deserialize<JsFileDone>(json);
                    FireFileFinished(callback, testContext.InputTestFilesString, testFileContexts, jsFileDone);
                    break;

                case "TestStart":
                    var jsTestCaseStart = jsonSerializer.Deserialize<JsTestCase>(json);
                    TestFileContext newContext = null;
                    var testName = jsTestCaseStart.TestCase.TestName.Trim();
                    var moduleName = (jsTestCaseStart.TestCase.ModuleName ?? "").Trim();

                    var fileContexts = GetFileMatches(testName, testFileContexts);
                    if (fileContexts.Count == 0 && currentTestFileContext == null)
                    {
                        // If there are no matches and no file context has been used yet
                        // then just choose the first context
                        newContext = testFileContexts[0];
                    }
                    else if (fileContexts.Count == 0)
                    {
                        // If there is already a current context and no matches we just keep using that context
                        // unless this test name has been used already in the current context. In that case
                        // move to the next context that hasn't seen this test yet
                        var testAlreadySeenInCurrentContext = currentTestFileContext.HasTestBeenSeen(moduleName, testName);
                        if (testAlreadySeenInCurrentContext)
                        {
                            newContext = testFileContexts.FirstOrDefault(x => !x.HasTestBeenSeen(moduleName, testName)) ?? currentTestFileContext;
                        }
                    }
                    else if (fileContexts.Count > 1)
                    {
                        // If we found the test has more than one file match
                        // try to choose the best match, otherwise just choose the first one

                        // If we have no file context yet take the first one
                        if (currentTestFileContext == null)
                        {
                            newContext = fileContexts.First();
                        }
                        else
                        {
                            // In this case we have an existing file context so we need to
                            // 1. Check to see if this test has been seen already on that context;
                            //    if so we need to try the next file context that matches it
                            // 2. If it is not seen yet in the current context and the current context
                            //    is one of the matches then keep using it
                            var testAlreadySeenInCurrentContext = currentTestFileContext.HasTestBeenSeen(moduleName, testName);
                            var currentContextInFileMatches = fileContexts.Any(x => x == currentTestFileContext);
                            if (!testAlreadySeenInCurrentContext && currentContextInFileMatches)
                            {
                                // Keep the current context
                                newContext = currentTestFileContext;
                            }
                            else
                            {
                                // Either take the first unused context OR the first one
                                newContext = fileContexts.Where(x => !x.IsUsed).FirstOrDefault() ?? fileContexts.First();
                            }
                        }
                    }
                    else if (fileContexts.Count == 1)
                    {
                        // We found a unique match
                        newContext = fileContexts[0];
                    }

                    if (newContext != null && newContext != currentTestFileContext)
                    {
                        currentTestFileContext = newContext;
                        testIndex = 0;
                    }

                    currentTestFileContext.IsUsed = true;
                    currentTestFileContext.MarkTestSeen(moduleName, testName);

                    PlayDeferredEvents(currentTestFileContext, deferredEvents);

                    jsTestCaseStart.TestCase.InputTestFile = currentTestFileContext.ReferencedFile.Path;
                    callback.TestStarted(jsTestCaseStart.TestCase);
                    break;

                case "TestDone":
                    var jsTestCaseDone = jsonSerializer.Deserialize<JsTestCase>(json);
                    var currentTestIndex = testIndex;
                    FireTestFinished(callback, currentTestFileContext, jsTestCaseDone, currentTestIndex);
                    testIndex++;
                    break;

                case "Log":
                    var log = jsonSerializer.Deserialize<JsLog>(json);

                    if (currentTestFileContext != null)
                    {
                        FireLogOutput(callback, currentTestFileContext, log);
                    }
                    else
                    {
                        deferredEvents.Add((fileContext) => FireLogOutput(callback, fileContext, log));
                    }
                    break;

                case "Error":
                    var error = jsonSerializer.Deserialize<JsError>(json);

                    if (currentTestFileContext != null)
                    {
                        FireErrorOutput(callback, currentTestFileContext, error);
                    }
                    else
                    {
                        deferredEvents.Add((fileContext) => FireErrorOutput(callback, fileContext, error));
                    }
                    break;
            }
        }
        catch (SerializationException e)
        {
            // Ignore malformed json and move on
            ChutzpahTracer.TraceError(e, "Received malformed json from Phantom in this line: '{0}'", line);
        }
    }

    return testFileContexts.Select(x => x.TestFileSummary).ToList();
}
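The loop above relies on prefixRegex exposing two named groups, "type" and "json". The exact marker the Phantom-side script emits is not shown here, so the token below is an assumption purely for illustration; only the named-group shape matches what the reader code expects.

using System;
using System.Text.RegularExpressions;

class PrefixRegexDemo
{
    // Hypothetical marker format; the real prefixRegex may differ.
    // The important part is the two named groups consumed above: "type" and "json".
    static readonly Regex prefixRegex = new Regex(@"^#_#(?<type>[A-Za-z]+)#_#\s*(?<json>.*)$");

    static void Main()
    {
        var line = "#_#TestDone#_# {\"testCase\":{\"testName\":\"adds numbers\",\"passed\":true}}";
        var match = prefixRegex.Match(line);
        if (match.Success)
        {
            Console.WriteLine(match.Groups["type"].Value); // TestDone
            Console.WriteLine(match.Groups["json"].Value); // the JSON payload handed to the serializer
        }
    }
}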
private TestCaseSummary ProcessTestPaths(IEnumerable<string> testPaths, TestOptions options, TestExecutionMode testExecutionMode, ITestMethodRunnerCallback callback, IChutzpahWebServerHost activeWebServerHost = null)
{
    var overallSummary = new TestCaseSummary();

    options.TestExecutionMode = testExecutionMode;
    options.DebugEnabled = m_debugEnabled;

    stopWatch.Start();
    if (testPaths == null)
    {
        throw new ArgumentNullException("testPaths");
    }
    if (fileProbe.FindFilePath(TestRunnerJsName) == null)
    {
        throw new FileNotFoundException("Unable to find test runner base js file: " + TestRunnerJsName);
    }

    // Concurrent list to collect test contexts
    var testContexts = new ConcurrentBag<TestContext>();

    // Concurrent collection used to gather the parallel results
    var testFileSummaries = new ConcurrentQueue<TestFileSummary>();
    var resultCount = 0;
    var cancellationSource = new CancellationTokenSource();

    try
    {
        // Given the input paths discover the potential test files
        var scriptPaths = FindTestFiles(testPaths, options);

        // Group the test files by their chutzpah.json files. Then check if those settings files have batching mode enabled.
        // If so, we keep those tests in a group together to be used in one context
        // Otherwise, we put each file in its own test group so each gets its own context
        var testRunConfiguration = BuildTestRunConfiguration(scriptPaths, options);

        ConfigureTracing(testRunConfiguration);

        var parallelism = testRunConfiguration.MaxDegreeOfParallelism.HasValue
            ? Math.Min(options.MaxDegreeOfParallelism, testRunConfiguration.MaxDegreeOfParallelism.Value)
            : options.MaxDegreeOfParallelism;

        var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = parallelism, CancellationToken = cancellationSource.Token };

        ChutzpahTracer.TraceInformation("Chutzpah run started in mode {0} with parallelism set to {1}", testExecutionMode, parallelOptions.MaxDegreeOfParallelism);

        // Build test contexts in parallel given a list of files each
        BuildTestContexts(options, testRunConfiguration.TestGroups, parallelOptions, cancellationSource, resultCount, testContexts, callback, overallSummary);

        // Compile the test contexts
        if (!PerformBatchCompile(callback, testContexts))
        {
            return overallSummary;
        }

        // Find the first test context with a web server configuration and use it
        var webServerHost = SetupWebServerHost(testContexts, options);
        ActiveWebServerHost = webServerHost;

        // Build test harness for each context and execute it in parallel
        ExecuteTestContexts(options, testExecutionMode, callback, testContexts, parallelOptions, testFileSummaries, overallSummary, webServerHost);

        // Gather TestFileSummaries into TestCaseSummary
        foreach (var fileSummary in testFileSummaries)
        {
            overallSummary.Append(fileSummary);
        }

        stopWatch.Stop();
        overallSummary.SetTotalRunTime((int)stopWatch.Elapsed.TotalMilliseconds);

        overallSummary.TransformResult = transformProcessor.ProcessTransforms(testContexts, overallSummary);

        ChutzpahTracer.TraceInformation(
            "Chutzpah run finished with {0} passed, {1} failed and {2} errors",
            overallSummary.PassedCount,
            overallSummary.FailedCount,
            overallSummary.Errors.Count);

        return overallSummary;
    }
    catch (Exception e)
    {
        callback.ExceptionThrown(e);

        ChutzpahTracer.TraceError(e, "Unhandled exception during Chutzpah test run");

        return overallSummary;
    }
    finally
    {
        // Clear the settings file cache since in VS Chutzpah is not unloaded from memory.
        // If we don't clear it then the user can never update the file.
        testSettingsService.ClearCache();
    }
}
private TestFileSummary ReadFromStream(StreamReader stream, TestContext testContext, TestOptions testOptions, ITestMethodRunnerCallback callback, bool debugEnabled)
{
    var referencedFile = testContext.ReferencedFiles.SingleOrDefault(x => x.IsFileUnderTest);
    var testIndex = 0;

    var summary = new TestFileSummary(testContext.InputTestFile);
    var codeCoverageEnabled = (!testContext.TestFileSettings.EnableCodeCoverage.HasValue && testOptions.CoverageOptions.Enabled)
                              || (testContext.TestFileSettings.EnableCodeCoverage.HasValue && testContext.TestFileSettings.EnableCodeCoverage.Value);
    if (codeCoverageEnabled)
    {
        summary.CoverageObject = new CoverageData();
    }

    string line;
    while ((line = stream.ReadLine()) != null)
    {
        if (debugEnabled)
        {
            Console.WriteLine(line);
        }

        var match = prefixRegex.Match(line);
        if (!match.Success)
        {
            continue;
        }

        var type = match.Groups["type"].Value;
        var json = match.Groups["json"].Value;

        // Only update last event timestamp if it is an important event.
        // Log and error could happen even though no test progress is made
        if (!type.Equals("Log") && !type.Equals("Error"))
        {
            lastTestEvent = DateTime.Now;
        }

        try
        {
            JsTestCase jsTestCase = null;
            switch (type)
            {
                case "FileStart":
                    callback.FileStarted(testContext.InputTestFile);
                    break;

                case "CoverageObject":
                    var jsCov = jsonSerializer.Deserialize<JsCoverage>(json);
                    summary.CoverageObject = coverageEngine.DeserializeCoverageObject(jsCov.Object, testContext);
                    break;

                case "FileDone":
                    var jsFileDone = jsonSerializer.Deserialize<JsFileDone>(json);
                    summary.TimeTaken = jsFileDone.TimeTaken;
                    callback.FileFinished(testContext.InputTestFile, summary);
                    break;

                case "TestStart":
                    jsTestCase = jsonSerializer.Deserialize<JsTestCase>(json);
                    jsTestCase.TestCase.InputTestFile = testContext.InputTestFile;
                    callback.TestStarted(jsTestCase.TestCase);
                    break;

                case "TestDone":
                    jsTestCase = jsonSerializer.Deserialize<JsTestCase>(json);
                    jsTestCase.TestCase.InputTestFile = testContext.InputTestFile;
                    AddLineNumber(referencedFile, testIndex, jsTestCase);
                    testIndex++;
                    callback.TestFinished(jsTestCase.TestCase);
                    summary.AddTestCase(jsTestCase.TestCase);
                    break;

                case "Log":
                    var log = jsonSerializer.Deserialize<JsLog>(json);

                    // This is an internal log message
                    if (log.Log.Message.StartsWith(internalLogPrefix))
                    {
                        ChutzpahTracer.TraceInformation("Phantom Log - {0}", log.Log.Message.Substring(internalLogPrefix.Length).Trim());
                        break;
                    }

                    log.Log.InputTestFile = testContext.InputTestFile;
                    callback.FileLog(log.Log);
                    summary.Logs.Add(log.Log);
                    break;

                case "Error":
                    var error = jsonSerializer.Deserialize<JsError>(json);
                    error.Error.InputTestFile = testContext.InputTestFile;
                    callback.FileError(error.Error);
                    summary.Errors.Add(error.Error);

                    ChutzpahTracer.TraceError("Error received from Phantom {0}", error.Error.Message);
                    break;
            }
        }
        catch (SerializationException e)
        {
            // Ignore malformed json and move on
            ChutzpahTracer.TraceError(e, "Received malformed json from Phantom in this line: '{0}'", line);
        }
    }

    return summary;
}
public TestCaseStreamReadResult Read(TestCaseSource<string> testCaseSource, TestOptions testOptions, TestContext testContext, ITestMethodRunnerCallback callback)
{
    if (testCaseSource == null)
    {
        throw new ArgumentNullException(nameof(testCaseSource));
    }
    if (testOptions == null)
    {
        throw new ArgumentNullException(nameof(testOptions));
    }
    if (testContext == null)
    {
        throw new ArgumentNullException(nameof(testContext));
    }

    var testCaseStreamReadResult = new TestCaseStreamReadResult();

    var codeCoverageEnabled = testOptions.CoverageOptions.ShouldRunCoverage(testContext.TestFileSettings.CodeCoverageExecutionMode);

    var streamingTestFileContexts = testContext.ReferencedFiles
        .Where(x => x.IsFileUnderTest)
        .Select(x => new StreamingTestFileContext(x, testContext, codeCoverageEnabled))
        .ToList();

    var deferredEvents = new List<Action<StreamingTestFileContext>>();

    if (streamingTestFileContexts.Count == 1)
    {
        currentTestFileContext = streamingTestFileContexts.First();
    }

    testCaseSource.Subscribe((line) => ProcessLine(line, testContext, streamingTestFileContexts, deferredEvents, callback, testOptions.DebugEnabled));
    var readerTask = testCaseSource.Open();
    while ((readerTask.Status == TaskStatus.WaitingToRun
            || readerTask.Status == TaskStatus.WaitingForActivation
            || readerTask.Status == TaskStatus.Running)
           && testCaseSource.IsAlive)
    {
        Thread.Sleep(100);
    }

    if (readerTask.IsCompleted)
    {
        ChutzpahTracer.TraceInformation("Finished reading stream from test file '{0}'", testContext.FirstInputTestFile);
        testCaseStreamReadResult.TestFileSummaries = streamingTestFileContexts.Select(x => x.TestFileSummary).ToList();
    }
    else
    {
        // Since we timed out make sure we play the deferred events so we do not lose errors
        // We will just attach these events to the first test context at this point since we do
        // not know where they belong
        PlayDeferredEvents(streamingTestFileContexts.FirstOrDefault(), deferredEvents);

        // We timed out so kill the process and return an empty test file summary
        ChutzpahTracer.TraceError("Test file '{0}' timed out after running for {1} milliseconds", testContext.FirstInputTestFile, (DateTime.Now - testCaseSource.LastTestEvent).TotalMilliseconds);

        testCaseSource.Dispose();
        testCaseStreamReadResult.TimedOut = true;
        testCaseStreamReadResult.TestFileSummaries = testContext.ReferencedFiles.Where(x => x.IsFileUnderTest).Select(file => new TestFileSummary(file.Path)).ToList();
    }

    return testCaseStreamReadResult;
}