/// <summary>
/// Runs the tests contained in the given executables. Discovery is performed on each source first.
/// </summary>
/// <param name="sources">Fully qualified paths of the executables to run.</param>
/// <param name="runContext">Context of the test run.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // Settings decide which executables take part in the run.
    var settings = CatchSettingsProvider.LoadSettings(runContext.RunSettings);

    frameworkHandle.SendMessage(TestMessageLevel.Informational, "CatchAdapter::RunTests... ");

    foreach (var exeName in sources.Where(name => settings.IncludeTestExe(name)))
    {
        // A failure in one executable must not stop the remaining ones from running.
        try
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "RunTest of source " + exeName);
            var tests = TestDiscoverer.CreateTestCases(exeName, runContext.SolutionDirectory);
            RunTests(tests, runContext, frameworkHandle);
        }
        catch (Exception ex)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, "Exception running tests: " + ex.Message);
            frameworkHandle.SendMessage(TestMessageLevel.Error, "Exception stack: " + ex.StackTrace);
        }
    }
}
/// <summary>
/// Executes the given test cases, grouped by the assembly (Source) that declares them.
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Context of the test run; supplies the adapter settings.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //Debugger.Launch();
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Machine Specifications Visual Studio Test Adapter - Executing Specifications.");

    int executedSpecCount = 0;
    Settings settings = GetSettings(runContext);
    string currentAssembly = string.Empty;

    try
    {
        // Group once and materialize: the original re-grouped the whole test
        // list a second time just to count the assemblies for the final
        // message, re-evaluating the deferred LINQ query.
        List<IGrouping<string, TestCase>> groupsBySource = tests.GroupBy(x => x.Source).ToList();
        foreach (IGrouping<string, TestCase> grouping in groupsBySource)
        {
            currentAssembly = grouping.Key;
            frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Format("Machine Specifications Visual Studio Test Adapter - Executing tests in {0}", currentAssembly));

            List<VisualStudioTestIdentifier> testsToRun = grouping.Select(test => test.ToVisualStudioTestIdentifier()).ToList();
            this.executor.RunAssemblySpecifications(currentAssembly, testsToRun, settings, uri, frameworkHandle);
            executedSpecCount += grouping.Count();
        }

        frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Machine Specifications Visual Studio Test Adapter - Execution Complete - {0} specifications in {1} assemblies.", executedSpecCount, groupsBySource.Count));
    }
    catch (Exception ex)
    {
        // Report the assembly that was in flight when the failure occurred.
        frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format("Machine Specifications Visual Studio Test Adapter - Error while executing specifications in assembly {0} - {1}", currentAssembly, ex.Message));
    }
}
/// <summary>
/// Log a message.
/// </summary>
/// <param name="message">The message to log.</param>
/// <param name="logLevel">The log level.</param>
/// <param name="panicMessage">Is it a panic message. Panic messages are always reported as errors.</param>
public void LogMessage(
    string message,
    Settings.LoggingLevel logLevel,
    bool panicMessage = false)
{
    // Fix: panic takes precedence. Previously the panic branch was an
    // 'else if', so a panic message that also satisfied the configured
    // logging level was logged as plain Informational without the
    // **PANIC** marker.
    if (panicMessage)
    {
        _frameworkHandle?.SendMessage(
            TestMessageLevel.Error,
            $"[nanoTestAdapter] **PANIC**: {message}");
        _logger?.SendMessage(
            TestMessageLevel.Error,
            $"[nanoTestAdapter] **PANIC**: {message}");
    }
    else if (logLevel >= _settings?.Logging)
    {
        // Null-conditional: when _settings is null the comparison is false
        // and the message is dropped, matching the original behavior.
        _frameworkHandle?.SendMessage(
            TestMessageLevel.Informational,
            $"[nanoTestAdapter]: {message}");
        _logger?.SendMessage(
            TestMessageLevel.Informational,
            $"[nanoTestAdapter]: {message}");
    }
}
// called by VS when user wants to run all tests
/// <summary>
/// Discovers tests in the given source files, reports each one, then delegates
/// execution to the <c>RunTests(IEnumerable&lt;TestCase&gt; ...)</c> overload.
/// </summary>
/// <param name="sources">Files to search for tests.</param>
/// <param name="runContext">Context of the test run.</param>
/// <param name="frameworkHandle">Handle used to report progress.</param>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    try
    {
        Log("RunTests(IEnumerable<string> ...) called");
        Log($"SolutionDirectory is {runContext.SolutionDirectory}, TestRunDirectory is {runContext.TestRunDirectory}");

        // inform user of all test files found
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Running from process: " + Process.GetCurrentProcess() + " ID:" + Process.GetCurrentProcess().Id.ToString());
        foreach (string filename in sources)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Finding tests in source: " + filename);
        }

        // now find and report all of the tests inside each of those files
        var testDisco = new TestDiscoverer();
        IEnumerable<TestCase> allTests = testDisco.GetTestCaseList(sources);
        foreach (var test in allTests)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Found test: " + test.DisplayName);
        }

        // call RunTests(IEnumerable<TestCase> ...) to execute tests
        RunTests(allTests, runContext, frameworkHandle);
    }
    catch (Exception ex)
    {
        // Fix: the original message labelled the exception text as
        // 'testFilename=', which was misleading; log it under an accurate label.
        string errMsg = $"RunTests(IEnumerable<string> ...): Exception thrown, exception={ex}";
        Log(errMsg);
        Console.Error.WriteLine(errMsg);
    }
}
/// <summary>
/// Executes the given test cases, grouped by the assembly (Source) that declares them.
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Context of the test run.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);

    int executedSpecCount = 0;
    string currentAssembly = string.Empty;

    try
    {
        ISpecificationExecutor specificationExecutor = this.adapterFactory.CreateExecutor();

        // Materialize once: the original kept a deferred GroupBy query and
        // re-enumerated the whole test list again via Count() for the final message.
        List<IGrouping<string, TestCase>> groupsBySource = tests.GroupBy(x => x.Source).ToList();
        foreach (IGrouping<string, TestCase> grouping in groupsBySource)
        {
            currentAssembly = grouping.Key;
            frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));
            specificationExecutor.RunAssemblySpecifications(currentAssembly, MSpecTestAdapter.uri, runContext, frameworkHandle, grouping);
            executedSpecCount += grouping.Count();
        }

        frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_COMPLETE, executedSpecCount, groupsBySource.Count));
    }
    catch (Exception ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
    }
}
/// <summary>
/// Executes the given test cases, grouped by the assembly (Source) that declares them.
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Context of the test run.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);

    int executedSpecCount = 0;
    string currentAssembly = string.Empty;

    try
    {
        ISpecificationExecutor specificationExecutor = this.adapterFactory.CreateExecutor();

        // Materialize once: the deferred GroupBy query was previously
        // re-enumerated by the trailing Count() call.
        List<IGrouping<string, TestCase>> groupsBySource = tests.GroupBy(x => x.Source).ToList();
        foreach (IGrouping<string, TestCase> grouping in groupsBySource)
        {
            currentAssembly = grouping.Key;
            frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));
            specificationExecutor.RunAssemblySpecifications(currentAssembly, MSpecTestAdapter.uri, runContext, frameworkHandle, grouping);
            executedSpecCount += grouping.Count();
        }

        frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_COMPLETE, executedSpecCount, groupsBySource.Count));
    }
    catch (Exception ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
    }
}
/// <summary>
/// Runs every test found in the given binaries.
/// </summary>
/// <param name="testBinaries">Binaries to search for tests to be run.</param>
/// <param name="context">Context in which to run tests.</param>
/// <param name="framework">Where results should be stored.</param>
public void RunTests(IEnumerable<string> testBinaries, IRunContext context, IFrameworkHandle framework)
{
    _state = ExecutorState.Running;

    foreach (var testBinary in testBinaries)
    {
        // Honor a pending cancellation request before starting the next binary.
        if (_state == ExecutorState.Cancelling)
        {
            _state = ExecutorState.Cancelled;
            return;
        }

        // Run the whole binary once ("*"), then map the report back onto the
        // individual discovered tests.
        var reportDocument = RunOrDebugCatchTest(testBinary, "*", context, framework);
        var discoveredTests = CatchTestDiscoverer.ListTestsInBinary(testBinary);

        foreach (var discoveredTest in discoveredTests)
        {
            try
            {
                var result = GetTestResultFromReport(discoveredTest, reportDocument, framework);
                framework.RecordResult(result);
            }
            catch (Exception ex)
            {
                // Log it and move on. It will show up to the user as a test that hasn't been run.
                framework.SendMessage(TestMessageLevel.Error, "Exception occured when processing test source: " + discoveredTest.FullyQualifiedName);
                framework.SendMessage(TestMessageLevel.Informational, "Message: " + ex.Message + "\nStacktrace:" + ex.StackTrace);
            }
        }
    }
}
/// <summary>
/// Executes the given test cases, grouped by their source assembly.
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Context of the test run; supplies the adapter settings.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //Debugger.Launch();
    frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);

    int executedSpecCount = 0;
    Settings settings = GetSettings(runContext);
    string currentAssembly = string.Empty;

    try
    {
        // Group once and materialize: the original re-grouped the whole test
        // list a second time just to count the assemblies for the final message.
        List<IGrouping<string, TestCase>> groupsBySource = tests.GroupBy(x => x.Source).ToList();
        foreach (IGrouping<string, TestCase> grouping in groupsBySource)
        {
            currentAssembly = grouping.Key;
            frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));

            List<VisualStudioTestIdentifier> testsToRun = grouping.Select(test => test.ToVisualStudioTestIdentifier()).ToList();
            this.executor.RunAssemblySpecifications(currentAssembly, testsToRun, settings, uri, frameworkHandle);
            executedSpecCount += grouping.Count();
        }

        frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_COMPLETE, executedSpecCount, groupsBySource.Count));
    }
    catch (Exception ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
    }
}
/// <summary>
/// Starts the gauge process, optionally attaches the debugger, and waits for it
/// to exit. On failure, every remaining test is recorded with the exception details.
/// </summary>
public void Run()
{
    try
    {
        _frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Invoking : {_gaugeProcess}");

        _gaugeProcess.Start();
        _gaugeProcess.BeginOutputReadLine();

        if (_isBeingDebugged)
        {
            // Attach the IDE debugger to the spawned gauge process.
            DTEHelper.AttachToProcess(_gaugeProcess.Id);
            _frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Attaching to ProcessID {_gaugeProcess.Id}");
        }

        _waitForGaugeProcessExit();
    }
    catch (Exception ex)
    {
        _frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);

        // Record every test as not run, carrying the failure details,
        // and drop it from the pending set.
        foreach (var pendingCase in _tests)
        {
            var failureResult = new TestResult(pendingCase)
            {
                Outcome = TestOutcome.None,
                ErrorMessage = $"{ex.Message}\n{ex.StackTrace}"
            };
            _frameworkHandle.RecordResult(failureResult);
            _frameworkHandle.RecordEnd(pendingCase, failureResult.Outcome);
            _pendingTests.Remove(pendingCase);
        }
    }
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="tests">Tests to be run.</param>
/// <param name="runContext">Context to use when executing the tests.</param>
/// <param name="frameworkHandle">Handle to the framework to record results and to do framework operations.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //if (Debugger.IsAttached) Debugger.Break();
    //else Debugger.Launch();

    // Load the adapter-specific section out of the run settings XML; a failure
    // here is logged but does not stop the test run.
    try
    {
        var parsed = XElement.Parse(runContext.RunSettings.SettingsXml);
        runContext.RunSettings.GetSettings(AppConfig.Name).Load(parsed.Element(AppConfig.Name).CreateReader());
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Framework: Error while loading SettingsXml - {ex.Message} {ex.Data}");
    }

    m_cancelled = false;
    try
    {
        foreach (TestCase testCase in tests)
        {
            // Honor a cancellation request between tests.
            if (m_cancelled)
            {
                break;
            }

            frameworkHandle.RecordStart(testCase);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Framework: Starting external test for " + testCase.DisplayName);

            var testOutcome = RunExternalTest(testCase, runContext, frameworkHandle, testCase);
            frameworkHandle.RecordResult(testOutcome);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Framework: Test result:" + testOutcome.Outcome.ToString());
            frameworkHandle.RecordEnd(testCase, testOutcome.Outcome);
        }
    }
    catch (Exception e)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Framework: Exception during test execution: " + e.Message);
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Framework: " + e.StackTrace);
    }
}
/// <summary>
/// Runs all specifications in each distinct source assembly.
/// </summary>
/// <param name="sources">Paths of the assemblies to execute.</param>
/// <param name="runContext">Context of the test run; supplies the adapter settings.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //Debugger.Launch();
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Machine Specifications Visual Studio Test Adapter - Executing Specifications.");

    Settings settings = GetSettings(runContext);

    // Materialize once: 'sources' was previously enumerated twice (the loop
    // and the trailing Count()), which re-runs any deferred query behind it.
    List<string> sourceList = sources.ToList();
    foreach (string currentAssembly in sourceList.Distinct())
    {
        try
        {
#if !NETSTANDARD
            // Skip assemblies that cannot contain specifications (no MSpec reference beside them).
            if (!File.Exists(Path.Combine(Path.GetDirectoryName(Path.GetFullPath(currentAssembly)), "Machine.Specifications.dll")))
            {
                frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Machine.Specifications.dll not found for {0}", currentAssembly));
                continue;
            }
#endif
            frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Machine Specifications Visual Studio Test Adapter - Executing tests in {0}", currentAssembly));
            this.executor.RunAssembly(currentAssembly, settings, uri, frameworkHandle);
        }
        catch (Exception ex)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, String.Format("Machine Specifications Visual Studio Test Adapter - Error while executing specifications in assembly {0} - {1}", currentAssembly, ex.Message));
        }
    }

    frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Complete on {0} assemblies ", sourceList.Count));
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="tests">Which tests should be run.</param>
/// <param name="context">Context in which to run tests.</param>
/// <param name="framework">Where results should be stored.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext context, IFrameworkHandle framework)
{
    _state = ExecutorState.Running;

    foreach (var testCase in tests)
    {
        // A cancellation request stops the run before the next test starts.
        if (_state == ExecutorState.Cancelling)
        {
            _state = ExecutorState.Cancelled;
            return;
        }

        try
        {
            var reportDocument = RunOrDebugCatchTest(testCase.Source, testCase.FullyQualifiedName, context, framework);
            framework.RecordResult(GetTestResultFromReport(testCase, reportDocument));
        }
        catch (Exception ex)
        {
            // Log it and move on. It will show up to the user as a test that hasn't been run.
            framework.SendMessage(TestMessageLevel.Error, "Exception occured when processing test case: " + testCase.FullyQualifiedName);
            framework.SendMessage(TestMessageLevel.Informational, "Message: " + ex.Message + "\nStacktrace:" + ex.StackTrace);
        }
    }
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="tests">Tests to be run.</param>
/// <param name="runContext">Context to use when executing the tests.</param>
/// <param name="frameworkHandle">Handle to the framework to record results and to do framework operations.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    m_cancelled = false;
    try
    {
        foreach (TestCase test in tests)
        {
            // Honor a cancellation request between tests.
            if (m_cancelled)
            {
                break;
            }

            frameworkHandle.RecordStart(test);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Starting external test for " + test.DisplayName);
            var testOutcome = RunExternalTest(test, runContext, frameworkHandle, test);
            frameworkHandle.RecordResult(testOutcome);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Test result:" + testOutcome.Outcome.ToString());
            // Fix: pair every RecordStart with a RecordEnd so the test platform
            // knows the case completed (the original never reported the end).
            frameworkHandle.RecordEnd(test, testOutcome.Outcome);
        }
    }
    catch (Exception e)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Exception during test execution: " + e.Message);
    }
}
/// <summary>
/// Walks every "testcase" node in the junit results document and merges each
/// outcome into the matching VS test result; nodes whose pytest id cannot be
/// matched are reported as errors.
/// </summary>
/// <param name="doc">Parsed junit XML results document.</param>
/// <param name="pytestIdToResultsMap">Results keyed by pytest id, updated in place.</param>
/// <param name="frameworkHandle">Handle used to report parse errors.</param>
private void Parse(XPathDocument doc, Dictionary<string, TestResult> pytestIdToResultsMap, IFrameworkHandle frameworkHandle)
{
    var testCaseNodes = doc.CreateNavigator().SelectDescendants("testcase", "", false);

    foreach (XPathNavigator resultNode in testCaseNodes)
    {
        // Stop processing as soon as cancellation has been requested.
        if (_cancelRequested.WaitOne(0))
        {
            break;
        }

        try
        {
            var pytestId = JunitXmlTestResultParser.GetPytestId(resultNode);
            if (pytestId == null || !pytestIdToResultsMap.TryGetValue(pytestId, out TestResult vsTestResult))
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, Strings.ErrorTestCaseNotFound.FormatUI(resultNode.OuterXml));
            }
            else
            {
                JunitXmlTestResultParser.UpdateVsTestResult(vsTestResult, resultNode);
            }
        }
        catch (Exception ex)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);
        }
    }
}
/// <summary>
/// Runs all specifications in each distinct source assembly.
/// </summary>
/// <param name="sources">Paths of the assemblies to execute.</param>
/// <param name="runContext">Context of the test run; supplies the adapter settings.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //Debugger.Launch();
    frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);

    Settings settings = GetSettings(runContext);

    // Materialize once: 'sources' was previously enumerated twice (the loop
    // and the trailing Count()), which re-runs any deferred query behind it.
    List<string> sourceList = sources.ToList();
    foreach (string currentAssembly in sourceList.Distinct())
    {
        try
        {
            // Skip assemblies that cannot contain specifications (no MSpec reference beside them).
            if (!File.Exists(Path.Combine(Path.GetDirectoryName(Path.GetFullPath(currentAssembly)), "Machine.Specifications.dll")))
            {
                frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Machine.Specifications.dll not found for {0}", currentAssembly));
                continue;
            }

            frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));
            this.executor.RunAssembly(currentAssembly, settings, uri, frameworkHandle);
        }
        catch (Exception ex)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, String.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
        }
    }

    frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("Complete on {0} assemblies ", sourceList.Count));
}
/// <summary>
/// Executes the given test cases, grouped by their source assembly.
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Context of the test run; supplies the adapter settings.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //Debugger.Launch();
    frameworkHandle.SendMessage(TestMessageLevel.Informational, Strings.EXECUTOR_STARTING);

    int executedSpecCount = 0;
    Settings settings = GetSettings(runContext);
    string currentAssembly = string.Empty;

    try
    {
        // Group once and materialize: the original re-grouped the whole test
        // list a second time just to count the assemblies for the final message.
        List<IGrouping<string, TestCase>> groupsBySource = tests.GroupBy(x => x.Source).ToList();
        foreach (IGrouping<string, TestCase> grouping in groupsBySource)
        {
            currentAssembly = grouping.Key;
            frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Format(Strings.EXECUTOR_EXECUTINGIN, currentAssembly));

            List<VisualStudioTestIdentifier> testsToRun = grouping.Select(test => test.ToVisualStudioTestIdentifier()).ToList();
            this.executor.RunAssemblySpecifications(currentAssembly, testsToRun, settings, uri, frameworkHandle);
            executedSpecCount += grouping.Count();
        }

        frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format(Strings.EXECUTOR_COMPLETE, executedSpecCount, groupsBySource.Count));
    }
    catch (Exception ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, string.Format(Strings.EXECUTOR_ERROR, currentAssembly, ex.Message));
    }
}
/// <summary>
/// Builds a result for every test case (defaulting to Skipped) and, when the
/// junit results XML exists, merges the parsed outcomes into those results.
/// </summary>
/// <param name="resultsXMLPath">Path to the junit XML results file.</param>
/// <param name="testCases">Test cases that took part in the run.</param>
/// <param name="frameworkHandle">Handle used to report parse errors.</param>
/// <returns>One result per test case, keyed internally by pytest id.</returns>
private IEnumerable<TestResult> ParseResults(string resultsXMLPath, IEnumerable<TestCase> testCases, IFrameworkHandle frameworkHandle)
{
    // Default TestResults
    var pytestIdToResultsMap = testCases
        .Select(tc => new TestResult(tc) { Outcome = TestOutcome.Skipped })
        .ToDictionary(tr => tr.TestCase.GetPropertyValue<string>(Pytest.Constants.PytestIdProperty, String.Empty), tr => tr);

    if (!File.Exists(resultsXMLPath))
    {
        // Without the results file all cases stay Skipped.
        frameworkHandle.SendMessage(TestMessageLevel.Error, Strings.PytestResultsXmlNotFound.FormatUI(resultsXMLPath));
        return pytestIdToResultsMap.Values;
    }

    try
    {
        var doc = JunitXmlTestResultParser.Read(resultsXMLPath);
        Parse(doc, pytestIdToResultsMap, frameworkHandle);
    }
    catch (Exception ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);
    }

    return pytestIdToResultsMap.Values;
}
/// <summary>
/// Discovers the tests in the given sources, reports them, and delegates
/// execution to the TestCase overload of RunTests.
/// </summary>
/// <param name="sources">Files to search for tests.</param>
/// <param name="runContext">Context of the test run.</param>
/// <param name="frameworkHandle">Handle used to report progress and errors.</param>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //if (Debugger.IsAttached) Debugger.Break();
    //else Debugger.Launch();

    // Load the adapter's configuration section from the run settings XML;
    // failures are logged but do not stop the run.
    try
    {
        var parsed = XElement.Parse(runContext.RunSettings.SettingsXml);
        runContext.RunSettings.GetSettings(AppConfig.Name).Load(parsed.Element(AppConfig.Name).CreateReader());
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Framework: Error while loading SettingsXml - {ex.Message} {ex.Data}");
    }

    try
    {
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Framework: Running from process:" + Process.GetCurrentProcess() + " ID:" + Process.GetCurrentProcess().Id.ToString());

        foreach (var source in sources)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Framework: Finding tests in source:" + source);
        }

        IEnumerable<TestCase> discovered = ProtractorTestDiscoverer.GetTests(sources, null);
        foreach (var testCase in discovered)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Framework: Found test:" + testCase.DisplayName);
        }

        RunTests(discovered, runContext, frameworkHandle);
    }
    catch (Exception e)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Framework: Exception during test execution: " + e.Message);
    }
}
// Runs one group of test cases that share the same project settings.
// Only pytest-based projects are handled; any other framework is skipped.
private void RunTestGroup(
    IGrouping<PythonProjectSettings, TestCase> testGroup,
    IRunContext runContext,
    IFrameworkHandle frameworkHandle
)
{
    PythonProjectSettings settings = testGroup.Key;
    if (settings == null || settings.TestFramework != TestFrameworkType.Pytest)
    {
        return;
    }

    using (var executor = new ExecutorService(settings, frameworkHandle, runContext))
    {
        // Optionally collect code coverage for this run.
        bool codeCoverage = ExecutorService.EnableCodeCoverage(runContext);
        string covPath = null;
        if (codeCoverage)
        {
            covPath = ExecutorService.GetCoveragePath(testGroup);
        }

        // Execute pytest; the run produces a junit-style results XML file.
        var resultsXML = executor.Run(testGroup, covPath, _cancelRequested);

        // Default TestResults: every case starts as Skipped until the XML says otherwise.
        var pytestIdToResultsMap = testGroup.Select(tc => new TestResult(tc) { Outcome = TestOutcome.Skipped })
            .ToDictionary(tr => tr.TestCase.GetPropertyValue<string>(Pytest.Constants.PytestIdProperty, String.Empty), tr => tr);

        if (File.Exists(resultsXML))
        {
            try
            {
                var doc = JunitXmlTestResultParser.Read(resultsXML);
                Parse(doc, pytestIdToResultsMap, frameworkHandle);
            }
            catch (Exception ex)
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);
            }
        }
        else
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, Strings.PytestResultsXmlNotFound.FormatUI(resultsXML));
        }

        // Record every result unless a cancellation arrives mid-way.
        foreach (var result in pytestIdToResultsMap.Values)
        {
            if (_cancelRequested.WaitOne(0))
            {
                break;
            }

            frameworkHandle.RecordResult(result);
        }

        if (codeCoverage)
        {
            ExecutorService.AttachCoverageResults(frameworkHandle, covPath);
        }
    }
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="tests">The tests.</param>
/// <param name="runContext">The run context.</param>
/// <param name="frameworkHandle">The framework handle.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // To debug, uncomment the next line
    // System.Diagnostics.Debugger.Launch();

    // Fix: remember the original console writer so it can be restored; the
    // original code redirected Console.Out and never put it back.
    System.IO.TextWriter originalOut = Console.Out;
    try
    {
        StringBuilder stringBuilder = new StringBuilder();

        // Buffers characters written to the console and forwards each complete
        // line to the framework log, skipping NULs and blank lines.
        ConsoleWriter consoleWriter = new ConsoleWriter(
            value =>
            {
                if (value == '\0')
                {
                    return;
                }

                stringBuilder.Append(value);
                string message = stringBuilder.ToString();
                if (string.IsNullOrEmpty(message) || (!message.EndsWith(Environment.NewLine)))
                {
                    return;
                }

                if (message == Environment.NewLine)
                {
                    stringBuilder.Clear();
                    return;
                }

                frameworkHandle.SendMessage(TestMessageLevel.Informational, message);
                stringBuilder.Clear();
            });
        Console.SetOut(consoleWriter);

        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Start executing Silverlight tests. Based on the selected number of tests this might take some time...");

        // Materialize once so the count and the execution see the same cases.
        List<TestCase> testCases = tests.ToList();
        frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Concat(testCases.Count, " will be executed."));

        StatLightWrapper statLightWrapper = new StatLightWrapper(frameworkHandle, runContext.IsBeingDebugged);
        TaskExecution taskExecution = new TaskExecution(frameworkHandle, statLightWrapper);
        taskExecution.StartTask(testCases);
        consoleWriter.Flush();

        frameworkHandle.SendMessage(TestMessageLevel.Informational, string.Concat("----- Finished execution of ", testCases.Count.ToString(), " tests. ----- "));
    }
    catch (Exception ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
    }
    finally
    {
        // Restore standard output even when the run fails.
        Console.SetOut(originalOut);
    }
}
// Shared execution core: validates arguments, loads settings, then runs the
// test-case groups supplied by 'testCaseAccessor' either in parallel (one
// thread per assembly) or sequentially.
void RunTests(IRunContext runContext, IFrameworkHandle frameworkHandle, Stopwatch stopwatch, Func <XunitVisualStudioSettings, IEnumerable <IGrouping <string, TestCase> > > testCaseAccessor)
{
    Guard.ArgumentNotNull("runContext", runContext);
    Guard.ArgumentNotNull("frameworkHandle", frameworkHandle);

    var settings = SettingsProvider.Load();
    // If the host will not be kept alive anyway, disposal is skipped below.
    var shuttingDown = !runContext.KeepAlive || settings.ShutdownAfterRun;

    if (runContext.KeepAlive && settings.ShutdownAfterRun)
    {
        frameworkHandle.EnableShutdownAfterTestRun = true;
    }

    var toDispose = new List <IDisposable>();

    if (settings.MessageDisplay == MessageDisplay.Diagnostic)
    {
        // stopwatch is shared across threads; lock before reading Elapsed.
        lock (stopwatch)
            frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("[xUnit.net {0}] Execution started", stopwatch.Elapsed));
    }

    try
    {
        RemotingUtility.CleanUpRegisteredChannels();

        cancelled = false;

        using (AssemblyHelper.SubscribeResolve())
            if (settings.ParallelizeAssemblies)
            {
                // One async run per assembly group; wait for all to finish.
                testCaseAccessor(settings)
                    .Select(testCaseGroup => RunTestsInAssemblyAsync(frameworkHandle, toDispose, testCaseGroup.Key, testCaseGroup, settings, stopwatch))
                    .ToList()
                    .ForEach(@event => @event.WaitOne());
            }
            else
            {
                testCaseAccessor(settings)
                    .ToList()
                    .ForEach(testCaseGroup => RunTestsInAssembly(frameworkHandle, toDispose, testCaseGroup.Key, testCaseGroup, settings, stopwatch));
            }
    }
    finally
    {
        // When shutting down, the process exit will reclaim everything anyway.
        if (!shuttingDown)
        {
            toDispose.ForEach(disposable => disposable.Dispose());
        }
    }

    if (settings.MessageDisplay == MessageDisplay.Diagnostic)
    {
        lock (stopwatch)
            frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("[xUnit.net {0}] Execution complete", stopwatch.Elapsed));
    }
}
// Runs the xUnit.net test cases of a single assembly through an
// XunitFrontController, honoring cancellation and parallelization settings.
void RunTestsInAssembly(IDiscoveryContext discoveryContext, IFrameworkHandle frameworkHandle, List <IDisposable> toDispose, string assemblyFileName, IEnumerable <TestCase> testCases, XunitVisualStudioSettings settings, Stopwatch stopwatch)
{
    if (cancelled)
    {
        return;
    }

    if (settings.MessageDisplay == MessageDisplay.Diagnostic)
    {
        // stopwatch is shared across threads; lock before reading Elapsed.
        lock (stopwatch)
            frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("[xUnit.net {0}] Execution starting: {1}", stopwatch.Elapsed, Path.GetFileName(assemblyFileName)));
    }

#if WIN8_STORE
    // For store apps, the files are copied to the AppX dir, we need to load it from there
    assemblyFileName = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), Path.GetFileName(assemblyFileName));
#elif WINDOWS_PHONE_APP
    // For WPA Apps, use the package location
    assemblyFileName = Path.Combine(Windows.ApplicationModel.Package.Current.InstalledLocation.Path, Path.GetFileName(assemblyFileName));
#endif

    var controller = new XunitFrontController(assemblyFileName, configFileName: null, shadowCopy: true);

    // Disposal happens later in the caller; guard the shared list.
    lock (toDispose)
        toDispose.Add(controller);

    // Re-hydrate the serialized xUnit test cases stored on the VS test cases.
    var xunitTestCases = testCases.ToDictionary(tc => controller.Deserialize(tc.GetPropertyValue <string>(SerializedTestCaseProperty, null)));

    using (var executionVisitor = new VsExecutionVisitor(discoveryContext, frameworkHandle, xunitTestCases, () => cancelled))
    {
        var executionOptions = new XunitExecutionOptions { DisableParallelization = !settings.ParallelizeTestCollections, MaxParallelThreads = settings.MaxParallelThreads };
        controller.RunTests(xunitTestCases.Keys.ToList(), executionVisitor, executionOptions);
        // Block until the visitor has seen the run-complete message.
        executionVisitor.Finished.WaitOne();
    }

    if (settings.MessageDisplay == MessageDisplay.Diagnostic)
    {
        lock (stopwatch)
            frameworkHandle.SendMessage(TestMessageLevel.Informational, String.Format("[xUnit.net {0}] Execution finished: {1}", stopwatch.Elapsed, Path.GetFileName(assemblyFileName)));
    }
}
/// <summary>
/// Forwards a message to the framework handle, but only when a handle exists
/// and debug logging is enabled (or no settings are available).
/// </summary>
/// <param name="level">Severity of the message.</param>
/// <param name="msg">Text to send.</param>
private void LogDebug(TestMessageLevel level, string msg)
{
    if (_frameworkHandle == null)
    {
        return;
    }

    // Without settings we default to logging; otherwise require Debug level.
    bool debugEnabled = _settings == null || _settings.LoggingLevel == Catch2Interface.LoggingLevels.Debug;
    if (debugEnabled)
    {
        _frameworkHandle.SendMessage(level, msg);
    }
}
/// <summary>
/// Runs the xUnit.net test cases contained in a single assembly.
/// </summary>
/// <param name="frameworkHandle">Handle used to report messages and results.</param>
/// <param name="toDispose">Collects disposables to be cleaned up after the run.</param>
/// <param name="runInfo">Assembly file, configuration and test cases to run.</param>
/// <param name="stopwatch">Shared stopwatch used to timestamp diagnostic messages.</param>
void RunTestsInAssembly(IFrameworkHandle frameworkHandle, List<IDisposable> toDispose, AssemblyRunInfo runInfo, Stopwatch stopwatch)
{
    if (cancelled)
    {
        return;
    }

    var assemblyFileName = runInfo.AssemblyFileName;
    var assemblyDisplayName = Path.GetFileNameWithoutExtension(assemblyFileName);

    if (runInfo.Configuration.DiagnosticMessagesOrDefault)
    {
        // stopwatch is shared across worker threads; lock before reading Elapsed.
        lock (stopwatch)
            frameworkHandle.SendMessage(TestMessageLevel.Informational,
                String.Format("[xUnit.net {0}] Execution starting: {1} (method display = {2}, parallel test collections = {3}, max threads = {4})",
                    stopwatch.Elapsed, assemblyDisplayName, runInfo.Configuration.MethodDisplayOrDefault,
                    runInfo.Configuration.ParallelizeTestCollectionsOrDefault, runInfo.Configuration.MaxParallelThreadsOrDefault));
    }

#if WINDOWS_PHONE_APP || WINDOWS_APP
    // For AppX Apps, use the package location
    assemblyFileName = Path.Combine(Windows.ApplicationModel.Package.Current.InstalledLocation.Path, Path.GetFileName(assemblyFileName));
#endif

    var diagnosticMessageVisitor = new DiagnosticMessageVisitor(frameworkHandle, assemblyDisplayName, runInfo.Configuration.DiagnosticMessagesOrDefault, stopwatch);
    var controller = new XunitFrontController(assemblyFileName, configFileName: null, shadowCopy: true, diagnosticMessageSink: diagnosticMessageVisitor);

    // Disposal happens later in the caller; guard the shared list.
    lock (toDispose)
        toDispose.Add(controller);

    // Re-hydrate the serialized xUnit test cases stored on the VS test cases.
    var xunitTestCases = runInfo.TestCases.ToDictionary(tc => controller.Deserialize(tc.GetPropertyValue<string>(SerializedTestCaseProperty, null)));
    var executionOptions = TestFrameworkOptions.ForExecution(runInfo.Configuration);

    using (var executionVisitor = new VsExecutionVisitor(frameworkHandle, xunitTestCases, executionOptions, () => cancelled))
    {
        controller.RunTests(xunitTestCases.Keys.ToList(), executionVisitor, executionOptions);
        executionVisitor.Finished.WaitOne();
    }

    if (runInfo.Configuration.DiagnosticMessagesOrDefault)
    {
        // Fix: the format string contained a raw line break inside the literal
        // ("Execution finished: <newline>{1}"), which does not compile in a
        // non-verbatim string; reconstructed as a single-line literal.
        lock (stopwatch)
            frameworkHandle.SendMessage(TestMessageLevel.Informational,
                String.Format("[xUnit.net {0}] Execution finished: {1}", stopwatch.Elapsed, assemblyDisplayName));
    }
}
/// <summary>
/// Run all given test cases, grouped by the executable they come from.
/// </summary>
/// <param name="testsToRun">List of test cases</param>
/// <param name="runContext">Run context</param>
/// <param name="frameworkHandle">Test frame work handle</param>
public void RunTests(IEnumerable<TestCase> testsToRun, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    SolutionDirectory = runContext.SolutionDirectory;
    this.frameworkHandle = frameworkHandle;

    var tests = testsToRun.ToList();
#if DEBUG
    frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Run tests :");
    foreach (var t in tests)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Informational, $"\tDisplayName={t.DisplayName} Full={t.FullyQualifiedName} ID={t.Id}");
    }
#endif
    var timer = Stopwatch.StartNew();

    // One Catch run per distinct executable.
    var listOfExes = tests.Select(t => t.Source).Distinct();
    foreach (var CatchExe in listOfExes)
    {
        var listOfTestCasesOfSource = from test in tests where test.Source == CatchExe select test.DisplayName;
        var listOfTestCases = listOfTestCasesOfSource.Aggregate("", (acc, name) => acc + name + Environment.NewLine);
#if DEBUG
        frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Run {CatchExe} with Tests:{Environment.NewLine}{listOfTestCases}");
#endif
        // Use the directory of the executable as the working directory.
        string workingDirectory = System.IO.Path.GetDirectoryName(CatchExe);
        if (workingDirectory == "")
        {
            workingDirectory = ".";
        }

        // Write the selected test names to the input file for the Catch runner.
        const string caseFile = "test.cases";
        string caseFilePath = System.IO.Path.Combine(workingDirectory, caseFile);
        System.IO.File.WriteAllText(caseFilePath, listOfTestCases);

        // Execute the tests
        string arguments = "-r xml --durations yes --input-file=" + caseFile;
        var output_text = ProcessRunner.RunProcess(frameworkHandle, CatchExe, arguments, workingDirectory, runContext.IsBeingDebugged);

        timer.Stop();
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Overall time " + timer.Elapsed.ToString());

        ReportResults(output_text, tests, frameworkHandle);

        // Remove the temporary input file.
        // Fix: delete it where it was written — the original deleted a path
        // relative to the adapter's current directory, leaking the file
        // whenever that differed from the executable's directory.
        System.IO.File.Delete(caseFilePath);
    }
}
/// <summary>
/// Runs the given test cases asynchronously: executes before-run hooks once per
/// distinct assembly, launches each non-ignored case (bounded by a semaphore),
/// waits for all of them, then executes the after-run hooks.
/// </summary>
/// <param name="testCases">Test cases to execute.</param>
/// <param name="frameworkHandle">Handle used to report progress and results.</param>
/// <param name="cancellationToken">Stops launching new cases when signalled.</param>
public async Task RunTestCases(IEnumerable <TestCase> testCases, IFrameworkHandle frameworkHandle, CancellationToken cancellationToken)
{
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Running tests");

    // Hooks run once per distinct assembly taking part in this run.
    var runHooks = new RunHooks(
        testRunContext,
        testCases
            .Select(
                test => test
                    .DiscoveredData()
                    .Assembly)
            .Distinct());

    await runHooks
        .ExecuteBeforeRun()
        .ConfigureAwait(false);

    // Caps how many test cases execute at the same time.
    using var simultaneousTestCasesSemaphore = new SemaphoreSlim(testRunContext.MaximumSimultaneousTestCases);

    var stepBinder = new StepBinder();

    var tasks = new List <Task>();
    foreach (var testCase in testCases)
    {
        // Already-started cases keep running; we just stop launching new ones.
        if (cancellationToken.IsCancellationRequested)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Test run cancelled");
            break;
        }

        if (testCase.DiscoveredData().IsIgnored)
        {
            testCase.MarkAsSkipped(frameworkHandle);
            continue;
        }

        tasks.Add(
            Run(testCase, testCase.DiscoveredData(), testRunContext, stepBinder, frameworkHandle, simultaneousTestCasesSemaphore));
    }

    await Task
        .WhenAll(tasks)
        .ConfigureAwait(false);

    // After-run hooks execute even for a partially-cancelled run.
    await runHooks
        .ExecuteAfterRun()
        .ConfigureAwait(false);
}
/// <summary>
/// Runs the given test cases inside a shared TestContext, building the
/// test plan up front and executing each planned test in order.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext context, IFrameworkHandle handle)
{
    // Install the trace listener once so trace output is captured as results.
    if (!Trace.Listeners.OfType<TestResultTraceListener>().Any())
    {
        Trace.Listeners.Add(new TestResultTraceListener());
    }

    var registrations = new ServiceCollection();
    var fixture = new TestFixture(registrations);

    // allow registration in test constructors
    fixture.TryAddSingleton(fixture);
    fixture.TryAddSingleton<IServiceCollection>(fixture);
    fixture.TryAddSingleton<IServiceProvider>(fixture);

    using var ctx = new TestContext(fixture, new VsTestMessageSink(handle));
    var plan = BuildTestPlan(tests, handle, fixture);

    ctx.Begin();
    foreach (var test in plan.Keys)
    {
        handle.SendMessage(TestMessageLevel.Informational, $"Evaluating test at path {Path.GetFileName(test.Source)}");
        var (method, instance) = plan[test];
        ExecuteStandardTest(handle, test, method, ctx, instance);
    }
    ctx.End();
}
/// <summary>
/// Discovers and runs all test cases of one source assembly in-process,
/// recording NotFound for any case whose method can no longer be resolved.
/// </summary>
private static void RunLocally(IFrameworkHandle frameworkHandle, IGrouping<string, TestCase> source)
{
    var discoverer = new TestDiscoverer();
    var discovered = discoverer.LoadFromSources(
        new[] { source.Key },
        message => frameworkHandle.SendMessage(TestMessageLevel.Informational, message));

    if (!discovered.Any())
    {
        return;
    }

    var runner = new TestRunner(discoverer);
    runner.Load(new[] { discovered[0].Type.Assembly }).GetAwaiter().GetResult();

    foreach (var testCase in source)
    {
        var className = (string)testCase.GetPropertyValue(Constants.TestClassProperty);
        var methodName = (string)testCase.GetPropertyValue(Constants.TestMethodProperty);

        var method = discoverer.Get(className, methodName);
        if (method == null)
        {
            // The case no longer maps to a discoverable method.
            frameworkHandle.RecordResult(new TestResult(testCase) { Outcome = TestOutcome.NotFound });
            continue;
        }

        RunTestMethod(frameworkHandle, testCase, runner, method);
    }

    runner.Shutdown().GetAwaiter().GetResult();
}
/// <summary>
/// Reads the test list from the run settings for the given sources,
/// materializes TestCase objects, and delegates to RunTestCases.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    if (sources == null)
    {
        throw new ArgumentNullException(nameof(sources));
    }
    if (runContext == null)
    {
        throw new ArgumentNullException(nameof(runContext));
    }
    if (frameworkHandle == null)
    {
        throw new ArgumentNullException(nameof(frameworkHandle));
    }

    _cancelRequested.Reset();

    var executorUri = new Uri(PythonConstants.TestExecutorUriString);
    var settingsDoc = Read(runContext.RunSettings.SettingsXml);
    var sourceSet = new HashSet<string>(sources, StringComparer.OrdinalIgnoreCase);

    var tests = TestReader
        .ReadTests(settingsDoc, sourceSet, m => { frameworkHandle?.SendMessage(TestMessageLevel.Warning, m); })
        .Select(t => new TestCase(t.FullyQualifiedName, executorUri, t.SourceFile)
        {
            DisplayName = t.DisplayName,
            LineNumber = t.LineNo,
            CodeFilePath = t.SourceFile
        })
        .ToList();

    // Bail out if a cancel arrived while we were reading the test list.
    if (_cancelRequested.WaitOne(0))
    {
        return;
    }

    RunTestCases(tests, runContext, frameworkHandle);
}
/// <summary>
/// Logs each source container this executor was asked to run.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext context, IFrameworkHandle handle)
{
    foreach (var source in sources)
    {
        var fileName = Path.GetFileName(source);
        handle.SendMessage(TestMessageLevel.Informational, $"Running all tests in {fileName}");
    }
}
/// <summary>
/// Reads the test list from the run settings XML for the given sources,
/// builds TestCase objects, and delegates to RunTestCases.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    ValidateArg.NotNull(sources, "sources");
    ValidateArg.NotNull(runContext, "runContext");
    ValidateArg.NotNull(frameworkHandle, "frameworkHandle");

    _cancelRequested.Reset();

    var executorUri = new Uri(PythonConstants.TestExecutorUriString);
    var settingsDoc = new XPathDocument(new StringReader(runContext.RunSettings.SettingsXml));
    var sourceSet = new HashSet<string>(sources, StringComparer.OrdinalIgnoreCase);

    var tests = new List<TestCase>();
    foreach (var t in TestReader.ReadTests(settingsDoc, sourceSet, m => { frameworkHandle?.SendMessage(TestMessageLevel.Warning, m); }))
    {
        var testCase = new TestCase(t.FullyQualifiedName, executorUri, t.SourceFile)
        {
            DisplayName = t.DisplayName,
            LineNumber = t.LineNo,
            CodeFilePath = t.SourceFile
        };
        tests.Add(testCase);
    }

    // Bail out if a cancel arrived while we were reading the test list.
    if (_cancelRequested.WaitOne(0))
    {
        return;
    }

    RunTestCases(tests, runContext, frameworkHandle);
}
/// <summary>
/// Runs the given QML test cases through the qmltestrunner process and
/// parses its XML output into recorded results.
/// </summary>
/// <param name="qmlTestRunner">Wrapper around the qmltestrunner executable</param>
/// <param name="source">The QML source file under test</param>
/// <param name="testCases">Cases to run; each is announced via RecordStart</param>
/// <param name="frameworkHandle">Test framework handle</param>
/// <param name="context">Discovery context used to locate the runner</param>
internal static void RunTests(
    IQmlTestRunner qmlTestRunner,
    string source,
    IEnumerable<TestCase> testCases,
    IFrameworkHandle frameworkHandle,
    IDiscoveryContext context)
{
    try
    {
        foreach (TestCase testCase in testCases)
        {
            frameworkHandle.RecordStart(testCase);
        }

        Dictionary<string, TestCase> dict = testCases.ToDictionary(tc => tc.FullyQualifiedName);

        // Pass the selected test function names on the command line, quoted.
        string functions = String.Join(" ", testCases.Select(tc => "\"" + tc.FullyQualifiedName + "\""));
        string arguments = "-xml -input " + source + " " + functions;

        QmlTestRunnerResult result = qmlTestRunner.Execute(arguments, context);
        ParseQmlTestRunnerXmlOutput(frameworkHandle, dict, result.StandardOutput);
    }
    catch (Exception ex)
    {
        // BUG FIX: the original logged only ex.StackTrace, dropping the
        // exception type and message; ToString() includes all three.
        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
    }
}
/// <summary>
/// Runs all tests found in the given source containers via the
/// TestExecutionManager, after validating sources and populating settings.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    PlatformServiceProvider.Instance.AdapterTraceLogger.LogInfo("MSTestExecutor.RunTests: Running tests from sources.");
    ValidateArg.NotNull(frameworkHandle, "frameworkHandle");
    ValidateArg.NotNullOrEmpty(sources, "sources");

    if (!this.MSTestDiscoverer.AreValidSources(sources))
    {
        throw new NotSupportedException();
    }

    // Populate the runsettings; a malformed settings file aborts the run.
    try
    {
        MSTestSettings.PopulateSettings(runContext);
    }
    catch (AdapterSettingsException ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);
        return;
    }

    // Scenarios that include testsettings or forcing a run via the legacy
    // adapter are currently not supported in MSTestAdapter.
    if (MSTestSettings.IsLegacyScenario(frameworkHandle))
    {
        return;
    }

    sources = PlatformServiceProvider.Instance.TestSource.GetTestSources(sources);

    this.cancellationToken = new TestRunCancellationToken();
    this.TestExecutionManager.RunTests(sources, runContext, frameworkHandle, this.cancellationToken);
    this.cancellationToken = null;
}
/// <summary>
/// Runs the given selected test cases via the TestExecutionManager, after
/// validating their sources and populating settings.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    ValidateArg.NotNull(frameworkHandle, "frameworkHandle");
    ValidateArg.NotNullOrEmpty(tests, "tests");

    var testSources = tests.Select(test => test.Source);
    if (!this.MSTestDiscoverer.AreValidSources(testSources))
    {
        throw new NotSupportedException();
    }

    // Populate the runsettings; a malformed settings file aborts the run.
    try
    {
        MSTestSettings.PopulateSettings(runContext);
    }
    catch (AdapterSettingsException ex)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);
        return;
    }

    // Scenarios that include testsettings or forcing a run via the legacy
    // adapter are currently not supported in MSTestAdapter.
    if (MSTestSettings.IsLegacyScenario(frameworkHandle))
    {
        return;
    }

    this.cancellationToken = new TestRunCancellationToken();
    this.TestExecutionManager.RunTests(tests, runContext, frameworkHandle, this.cancellationToken);
    this.cancellationToken = null;
}
/// <summary>
/// Re-discovers scenarios for the requested cases, marks cases that no
/// longer map as NotFound, and runs the ones that still resolve.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    try
    {
        var sources = tests.Select(t => t.Source).Distinct();
        var discovered = TestDiscoverer.DiscoverTests(sources, frameworkHandle).ToArray();

        // Cases that no longer map to a discovered scenario are reported missing.
        var unmapped = tests.Where(t => !discovered.Any(d => d.Id == t.Id));
        unmapped.MarkAsNotFound(frameworkHandle);

        // Re-resolve each requested case against the freshly discovered set.
        var mapped = tests
            .Select(t => discovered.FirstOrDefault(d => d.Id == t.Id))
            .Where(t => t != null);

        RunMappedTests(mapped, frameworkHandle);
    }
    catch (Exception exception)
    {
        frameworkHandle.SendMessage(
            TestMessageLevel.Error,
            $"Skipping test run because of an early exception: {exception}");
        tests.TryMarkAsFailed(frameworkHandle, exception.Message, exception.StackTrace);
    }
}
/// <summary>
/// Runs every source file through the nodeunit driver, isolating each file
/// so one failure does not stop the rest of the run.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    _cancelled = false;

    foreach (string fileName in sources)
    {
        if (_cancelled)
        {
            break;
        }

        try
        {
            RunFileOrTest(frameworkHandle, runContext, fileName);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process Done.");
        }
        catch (Exception ex)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, "Exception spawning nodeunit.cmd: " + ex.ToString());
        }
    }
}
/// <summary>
/// Executes every source assembly inside its own sandbox.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    this.frameworkHandle = frameworkHandle;

    foreach (var source in sources)
    {
        using (var sandbox = new Sandbox<Executor>(source))
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Running: " + source);
            sandbox.Content.Execute(this);
        }
    }
}
/// <summary>
/// Groups selected test cases by source assembly and executes each group
/// inside its own sandbox.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    this.frameworkHandle = frameworkHandle;

    foreach (var group in tests.GroupBy(t => t.Source))
    {
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Running selected: " + group.Key);
        using (var sandbox = new Sandbox<Executor>(group.Key))
        {
            var selectedNames = group.Select(t => t.FullyQualifiedName).ToArray();
            sandbox.Content.Execute(this, selectedNames);
        }
    }
}
/// <summary>
/// Discovers protractor tests in the given sources and delegates to the
/// TestCase-based RunTests overload.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    try
    {
        var currentProcess = Process.GetCurrentProcess();
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Running from process:" + currentProcess + " ID:" + currentProcess.Id.ToString());

        foreach (var source in sources)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Finding tests in source:" + source);
        }

        IEnumerable<TestCase> tests = ProtractorTestDiscoverer.GetTests(sources, null);
        foreach (var test in tests)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Found test:" + test.DisplayName);
        }

        RunTests(tests, runContext, frameworkHandle);
    }
    catch (Exception e)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Exception during test execution: " + e.Message);
    }
}
/// <summary>
/// Converts the VS test cases into Google Test suites and runs each suite,
/// tracking completions back into the VS framework handle.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // Pick up run settings when the settings service is registered,
    // otherwise fall back to defaults.
    VSTestSettings settings;
    if (runContext.RunSettings.GetSettings(VSTestSettings.SettingsName) is VSTestSettingsService settingsProvider)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Found settings.");
        settings = settingsProvider.Settings;
    }
    else
    {
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "No settings found. Using defaults.");
        settings = new VSTestSettings();
    }
    frameworkHandle.SendMessage(TestMessageLevel.Informational, settings.WorkingDirectory);

    _frameworkHandle = frameworkHandle;
    ITestLogger logger = new VSLogger(frameworkHandle);

    GTestConverter converter = new GTestConverter();
    IEnumerable<ITestSuite> suites = converter.ConvertToGTest(tests.ToArray(), logger);
    foreach (var suite in suites)
    {
        logger.Information(string.Format("Processing suite {0}...", suite.RunTarget));
        VSTracker tracker = new VSTracker(frameworkHandle, suite);
        GTestRunner runner = new GTestRunner(logger, false);
        runner.TestCompleted += tracker.TestCompleted;
        logger.Information(string.Format("Running suite {0}...", suite.RunTarget));
        runner.Run(suite);
    }
}
/// <summary>
/// Runs each test case in turn, sharing a settings cache across cases and
/// isolating per-case failures.
/// </summary>
private void RunTestCases(
    IEnumerable<TestCase> tests,
    IRunContext runContext,
    IFrameworkHandle frameworkHandle
) {
    // Cache of .pyproj file path -> project settings, shared across cases.
    var sourceToSettings = new Dictionary<string, PythonProjectSettings>();

    foreach (var test in tests)
    {
        // Stop promptly if cancellation has been signalled.
        if (_cancelRequested.WaitOne(0))
        {
            break;
        }

        try
        {
            RunTestCase(frameworkHandle, runContext, test, sourceToSettings);
        }
        catch (Exception ex)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
        }
    }
}
// Runs a single Python test case in an external interpreter process,
// optionally attaching the VS debugger, and records the outcome.
// NOTE(review): comments added only; code is unchanged. The method's
// closing brace lies beyond this chunk.
private void RunTestCase(
    IFrameworkHandle frameworkHandle,
    IRunContext runContext,
    TestCase test,
    Dictionary<string, PythonProjectSettings> sourceToSettings
) {
    var testResult = new TestResult(test);
    frameworkHandle.RecordStart(test);
    testResult.StartTime = DateTimeOffset.Now;
    PythonProjectSettings settings;
    // Settings are cached per source so each project file is only loaded once.
    if (!sourceToSettings.TryGetValue(test.Source, out settings)) {
        sourceToSettings[test.Source] = settings = LoadProjectSettings(test.Source, _interpreterService);
    }
    if (settings == null) {
        // No usable settings: report the failure and end the case immediately.
        frameworkHandle.SendMessage(
            TestMessageLevel.Error,
            "Unable to determine interpreter to use for " + test.Source);
        RecordEnd(
            frameworkHandle,
            test,
            testResult,
            null,
            "Unable to determine interpreter to use for " + test.Source,
            TestOutcome.Failed);
        return;
    }
    var debugMode = PythonDebugMode.None;
    if (runContext.IsBeingDebugged && _app != null) {
        debugMode = settings.EnableNativeCodeDebugging ? PythonDebugMode.PythonAndNative : PythonDebugMode.PythonOnly;
    }
    var testCase = new PythonTestCase(settings, test, debugMode);
    var dte = _app != null ? _app.GetDTE() : null;
    if (dte != null && debugMode != PythonDebugMode.None) {
        // Detach any existing debug sessions before attaching to the new process.
        dte.Debugger.DetachAll();
    }
    if (!File.Exists(settings.Factory.Configuration.InterpreterPath)) {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Interpreter path does not exist: " + settings.Factory.Configuration.InterpreterPath);
        return;
    }
    var env = new Dictionary<string, string>();
    var pythonPathVar = settings.Factory.Configuration.PathEnvironmentVariable;
    var pythonPath = testCase.SearchPaths;
    if (!string.IsNullOrWhiteSpace(pythonPathVar)) {
        if (_app != null) {
            // Unless the user disabled "ClearGlobalPythonPath", the global
            // PYTHONPATH value is NOT appended to the search paths.
            var settingsManager = SettingsManagerCreator.GetSettingsManager(dte);
            if (settingsManager != null) {
                var store = settingsManager.GetReadOnlySettingsStore(SettingsScope.UserSettings);
                if (store != null && store.CollectionExists(@"PythonTools\Options\General")) {
                    var settingStr = store.GetString(@"PythonTools\Options\General", "ClearGlobalPythonPath", "True");
                    bool settingBool;
                    if (bool.TryParse(settingStr, out settingBool) && !settingBool) {
                        pythonPath += ";" + Environment.GetEnvironmentVariable(pythonPathVar);
                    }
                }
            }
        }
        env[pythonPathVar] = pythonPath;
    }
    foreach (var envVar in testCase.Environment) {
        env[envVar.Key] = envVar.Value;
    }
    using (var proc = ProcessOutput.Run(
        !settings.IsWindowsApplication ?
            settings.Factory.Configuration.InterpreterPath :
            settings.Factory.Configuration.WindowsInterpreterPath,
        testCase.Arguments,
        testCase.WorkingDirectory,
        env,
        false,
        null
    )) {
        bool killed = false;
#if DEBUG
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "cd " + testCase.WorkingDirectory);
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "set " + (pythonPathVar ?? "") + "=" + (pythonPath ?? ""));
        frameworkHandle.SendMessage(TestMessageLevel.Informational, proc.Arguments);
#endif
        // Give the process a moment to start before attempting to attach.
        proc.Wait(TimeSpan.FromMilliseconds(500));
        if (debugMode != PythonDebugMode.None) {
            if (proc.ExitCode.HasValue) {
                // Process has already exited
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Failed to attach debugger because the process has already exited.");
                if (proc.StandardErrorLines.Any()) {
                    frameworkHandle.SendMessage(TestMessageLevel.Error, "Standard error from Python:");
                    foreach (var line in proc.StandardErrorLines) {
                        frameworkHandle.SendMessage(TestMessageLevel.Error, line);
                    }
                }
            }
            try {
                if (debugMode == PythonDebugMode.PythonOnly) {
                    // Retry the attach until it succeeds or the process exits.
                    string qualifierUri = string.Format("tcp://{0}@localhost:{1}", testCase.DebugSecret, testCase.DebugPort);
                    while (!_app.AttachToProcess(proc, PythonRemoteDebugPortSupplierUnsecuredId, qualifierUri)) {
                        if (proc.Wait(TimeSpan.FromMilliseconds(500))) {
                            break;
                        }
                    }
                } else {
                    var engines = new[] { PythonDebugEngineGuid, VSConstants.DebugEnginesGuids.NativeOnly_guid };
                    while (!_app.AttachToProcess(proc, engines)) {
                        if (proc.Wait(TimeSpan.FromMilliseconds(500))) {
                            break;
                        }
                    }
                }
#if DEBUG
            } catch (COMException ex) {
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Error occurred connecting to debuggee.");
                frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
                try {
                    proc.Kill();
                } catch (InvalidOperationException) {
                    // Process has already exited
                }
                killed = true;
            }
#else
            } catch (COMException) {
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Error occurred connecting to debuggee.");
                try {
                    proc.Kill();
                } catch (InvalidOperationException) {
                    // Process has already exited
                }
                killed = true;
            }
#endif
        }
        // https://pytools.codeplex.com/workitem/2290
        // Check that proc.WaitHandle was not null to avoid crashing if
        // a test fails to start running. We will report failure and
        // send the error message from stdout/stderr.
        var handles = new WaitHandle[] { _cancelRequested, proc.WaitHandle };
        if (handles[1] == null) {
            killed = true;
        }
        // Index 0 is the cancel event: a cancel request kills the process.
        if (!killed && WaitHandle.WaitAny(handles) == 0) {
            try {
                proc.Kill();
            } catch (InvalidOperationException) {
                // Process has already exited
            }
            killed = true;
        } else {
            RecordEnd(frameworkHandle, test, testResult,
                string.Join(Environment.NewLine, proc.StandardOutputLines),
                string.Join(Environment.NewLine, proc.StandardErrorLines),
                (proc.ExitCode == 0 && !killed) ? TestOutcome.Passed : TestOutcome.Failed);
        }
    }
/// <summary>
/// Runs each Node.js test case in turn, sharing a settings cache and
/// isolating per-case failures.
/// </summary>
private void RunTestCases(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // May be null, but this is handled by RunTestCase if it matters.
    // No VS instance just means no debugging, but everything else is okay.
    using (var app = VisualStudioApp.FromEnvironmentVariable(NodejsConstants.NodeToolsProcessIdEnvironmentVariable))
    {
        // Cache of .njsproj file path -> project settings.
        var sourceToSettings = new Dictionary<string, NodejsProjectSettings>();

        foreach (var test in tests)
        {
            if (_cancelRequested.WaitOne(0))
            {
                break;
            }

            try
            {
                RunTestCase(app, frameworkHandle, runContext, test, sourceToSettings);
            }
            catch (Exception ex)
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
            }
        }
    }
}
// Launches node.exe, pipes the nodeunit driver script plus a runTests(...)
// invocation over stdin, and streams results back via stdout: each stdout
// line that parses as a NodeUnitTestResult is recorded as a TestResult.
// NOTE(review): comments added only; code is unchanged.
private static void RunFileOrTest(IFrameworkHandle frameworkHandle, IRunContext runContext, string fileName, string testName = null)
{
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "runContext.SolutionDirectory: " + runContext.SolutionDirectory);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "runContext.TestRunDirectory: " + runContext.TestRunDirectory);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "source: " + fileName);

    string nodeFullPath = NodeJsHelper.LocateNodeJs();

    Process proc = new Process();
    proc.StartInfo.FileName = nodeFullPath;
    proc.StartInfo.WorkingDirectory = Path.GetDirectoryName(fileName);
    proc.StartInfo.UseShellExecute = false;
    proc.StartInfo.RedirectStandardInput = true;
    proc.StartInfo.RedirectStandardOutput = true;
    proc.StartInfo.RedirectStandardError = true;
    proc.StartInfo.CreateNoWindow = true;

    // stdout handler: lines that deserialize into NodeUnitTestResult become
    // recorded results; everything else is only echoed to the log.
    proc.OutputDataReceived += (sender, args) => {
        var data = args.Data;
        if (!string.IsNullOrEmpty(data)) {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "> " + data);
            if (data.Contains("Error: Cannot find module 'nodeunit'")) {
                // nodeunit is not installed: fail the requested test explicitly.
                if (!string.IsNullOrEmpty(testName)) {
                    GenericFailTest(frameworkHandle, fileName, testName, data);
                }
            } else {
                try {
                    var result = JsonConvert.DeserializeObject<NodeUnitTestResult>(data);
                    if (result != null && !string.IsNullOrEmpty(result.TestName)) {
                        var testCase = new TestCase(result.TestName, NodeUnitTestExecutor.ExecutorUri, fileName) { DisplayName = result.TestName };
                        var testResult = new TestResult(testCase) { DisplayName = result.TestName };
                        // Clamp to a minimum duration so the IDE never shows zero.
                        testResult.Duration = TimeSpan.FromSeconds(Math.Max(.001, result.Duration));
                        testResult.Outcome = result.Passed ? TestOutcome.Passed : TestOutcome.Failed;
                        if (result.Assertions.Length > 0) {
                            // Only the first failed assertion is surfaced.
                            var first = result.Assertions.First();
                            testResult.ErrorStackTrace = FormatStackTrace(first.Stack);
                            testResult.ErrorMessage = first.Message;
                        }
                        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Recording Result for " + testCase.DisplayName + " (" + testResult.Outcome.ToString() + ")");
                        frameworkHandle.RecordResult(testResult);
                    }
                } catch (Newtonsoft.Json.JsonException) {
                    // Non-JSON output lines are expected and ignored.
                    //frameworkHandle.SendMessage(TestMessageLevel.Informational, data);
                }
            }
        }
    };

    // stderr handler: log warnings and fail explicitly when nodeunit is missing.
    proc.ErrorDataReceived += (sender, args) => {
        if (!string.IsNullOrEmpty(args.Data)) {
            frameworkHandle.SendMessage(TestMessageLevel.Warning, "^ " + args.Data);
            if (args.Data.Contains("Error: Cannot find module 'nodeunit'")) {
                if (!string.IsNullOrEmpty(testName)) {
                    GenericFailTest(frameworkHandle, fileName, testName, args.Data);
                }
            }
        }
    };

    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process FileName: " + proc.StartInfo.FileName);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process Arguments: " + proc.StartInfo.Arguments);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process WorkingDirectory: " + proc.StartInfo.WorkingDirectory);

    proc.Start();
    proc.BeginOutputReadLine();
    proc.BeginErrorReadLine();

    // Feed the driver script first, then the runTests(...) call over stdin.
    proc.StandardInput.Write(Resources.RunTests);

    string testFile = Path.GetFileName(fileName).Replace("\\", "\\\\");
    string jsCommand = "runTests(\"" + testFile + "\"";
    if (!string.IsNullOrEmpty(testName))
        jsCommand += ", \"" + testName + "\"";
    jsCommand += ");";
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process Emitting Command: " + jsCommand);
    proc.StandardInput.Write(jsCommand);
    proc.StandardInput.Close();
    proc.WaitForExit();
}
/// <summary>
/// Records a synthetic failed result for the named test, used when the
/// runner itself failed (e.g. the nodeunit module is missing).
/// </summary>
private static void GenericFailTest(IFrameworkHandle frameworkHandle, string fileName, string testName, string message = null)
{
    var failedCase = new TestCase(testName, NodeUnitTestExecutor.ExecutorUri, fileName)
    {
        DisplayName = testName
    };
    var failedResult = new TestResult(failedCase)
    {
        DisplayName = testName,
        Outcome = TestOutcome.Failed,
        ErrorMessage = message
    };

    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Recording Result for " + failedCase.DisplayName + " (" + failedResult.Outcome.ToString() + ")");
    frameworkHandle.RecordResult(failedResult);
}
/// <summary>
/// Runs protractor for a single spec file, directing the JSON report to a
/// temp file, and returns the path of that result file.
/// </summary>
/// <param name="test">Test case whose Source is the spec file</param>
/// <param name="runContext">Run context (unused, kept for signature parity)</param>
/// <param name="frameworkHandle">Test framework handle</param>
/// <returns>Path of the JSON result file written by protractor</returns>
private string RunProtractor(TestCase test, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    var resultFile = Path.GetFileNameWithoutExtension(test.Source);
    resultFile += ".result.json";
    resultFile = Path.Combine(Path.GetTempPath(), resultFile);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "result file: " + resultFile);

    ProcessStartInfo info = new ProcessStartInfo()
    {
        Arguments = string.Format("--resultJsonOutputFile \"{0}\" --specs \"{1}\" --framework jasmine", resultFile, test.Source),
        FileName = "protractor.cmd"
    };
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "starting protractor with arguments:" + info.Arguments);

    // FIX: Process is IDisposable; the original leaked the process handle.
    using (Process p = new Process())
    {
        p.StartInfo = info;
        p.Start();
        p.WaitForExit();
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Protractor run done exit code:" + p.ExitCode.ToString());
    }

    return resultFile;
}
/// <summary>
/// Runs each PowerShell-based test through the executor matching its
/// framework prefix, capturing host output and recording a result per case.
/// </summary>
/// <param name="tests">Test cases; the framework name is the first "||"-separated
/// segment of each FullyQualifiedName</param>
/// <param name="runContext">Run context</param>
/// <param name="frameworkHandle">Test framework handle</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    _mCancelled = false;
    SetupExecutionPolicy();

    foreach (var test in tests)
    {
        if (_mCancelled) break;

        var testFramework = test.FullyQualifiedName.Split(new[] { "||" }, StringSplitOptions.None)[0];
        var executor = _testExecutors.FirstOrDefault(
            m => m.TestFramework.Equals(testFramework, StringComparison.OrdinalIgnoreCase));

        if (executor == null)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, String.Format("Unknown test executor: {0}", testFramework));
            return;
        }

        // Default to a failed result until the executor reports otherwise.
        var testResult = new TestResult(test);
        testResult.Outcome = TestOutcome.Failed;
        testResult.ErrorMessage = "Unexpected error! Failed to run tests!";

        PowerShellTestResult testResultData = null;
        var testOutput = new StringBuilder();

        try
        {
            var testAdapter = new TestAdapterHost();
            testAdapter.HostUi.OutputString = s => testOutput.Append(s);

            // FIX: the runspace is IDisposable and was never closed or
            // disposed, leaking one PowerShell runspace per executed test
            // (also fixed the "runpsace" typo in the local name).
            using (var runspace = RunspaceFactory.CreateRunspace(testAdapter))
            {
                runspace.Open();

                using (var ps = PowerShell.Create())
                {
                    ps.Runspace = runspace;
                    testResultData = executor.RunTest(ps, test, runContext);
                }
            }
        }
        catch (Exception ex)
        {
            testResult.Outcome = TestOutcome.Failed;
            testResult.ErrorMessage = ex.Message;
            testResult.ErrorStackTrace = ex.StackTrace;
        }

        if (testResultData != null)
        {
            testResult.Outcome = testResultData.Outcome;
            testResult.ErrorMessage = testResultData.ErrorMessage;
            testResult.ErrorStackTrace = testResultData.ErrorStacktrace;
        }

        if (testOutput.Length > 0)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, testOutput.ToString());
        }

        frameworkHandle.RecordResult(testResult);
    }
}
/// <summary>
/// Logs each requested source container.
/// </summary>
public void RunTests(IEnumerable<string> sources, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    foreach (var item in sources)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Informational, $"{item}");
    }
}
/// <summary>
/// Runs each cucumber.js test case by copying its source next to the spec
/// as a .feature file, running cucumber under node (optionally with the
/// debugger attached), and parsing the JSON report into a result.
/// </summary>
/// <param name="tests">Test cases to run</param>
/// <param name="runContext">Run context; IsBeingDebugged enables node --debug</param>
/// <param name="frameworkHandle">Test framework handle</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    foreach (var test in tests)
    {
        if (this.canceled)
        {
            return;
        }

        var result = new TestResult(test);
        var target = System.IO.Path.ChangeExtension(test.Source, ".feature");
        try
        {
            // FIX: pass overwrite=true; File.Copy without it throws
            // IOException whenever a stale .feature copy was left behind
            // (e.g. after a crash before the finally-delete ran).
            System.IO.File.Copy(test.Source, target, true);

            var appDataPath = Environment.GetEnvironmentVariable("APPDATA");
            var nodePath = System.IO.Path.Combine(appDataPath, "npm");
            var cucumberPath = System.IO.Path.Combine(nodePath, "node_modules\\cucumber\\bin\\cucumber.js");

            System.Diagnostics.ProcessStartInfo procStartInfo = runContext.IsBeingDebugged
                ? new System.Diagnostics.ProcessStartInfo("node", $"--debug=5858 \"{cucumberPath}\" \"{target}:{test.LineNumber}\" -f json")
                : new System.Diagnostics.ProcessStartInfo("node", $"\"{cucumberPath}\" \"{target}:{test.LineNumber}\" -f json");

            // Redirect stdout/stderr so the JSON report can be parsed.
            procStartInfo.RedirectStandardOutput = true;
            procStartInfo.RedirectStandardError = true;
            procStartInfo.UseShellExecute = false;
            procStartInfo.CreateNoWindow = true;

            System.Diagnostics.Process proc = new System.Diagnostics.Process();
            proc.StartInfo = procStartInfo;
            proc.Start();
            if (runContext.IsBeingDebugged)
            {
                DteHelpers.DebugAttachToNode(proc.Id, 5678);
            }

            // FIX: drain the redirected streams BEFORE WaitForExit. The
            // original waited first, which deadlocks once the child fills
            // the pipe buffer with a large JSON report.
            var output = proc.StandardOutput.ReadToEnd();
            var error = proc.StandardError.ReadToEnd();
            proc.WaitForExit();

            var features = JArray.Parse(output);
            foreach (var feature in features)
            {
                frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{feature["keyword"]}: {feature["name"]}");
                foreach (var element in feature["elements"])
                {
                    frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{element["keyword"]}: {element["name"]}");
                    frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{element["description"]}");

                    bool passed = true;
                    var duration = 0L;
                    foreach (var step in element["steps"])
                    {
                        var message = $"{step["keyword"]}{step["name"]}";
                        duration = duration + (long)step["result"]["duration"];
                        frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, message);
                        if ((string)step["result"]["status"] == "failed")
                        {
                            // Any failed step fails the whole scenario.
                            result.ErrorMessage = (string)step["result"]["error_message"];
                            frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{result.ErrorMessage}");
                            passed = false;
                        }
                    }
                    result.Duration = TimeSpan.FromTicks(duration);
                    result.Outcome = passed ? TestOutcome.Passed : TestOutcome.Failed;
                }
            }
        }
        catch (Exception ex)
        {
            result.Outcome = TestOutcome.Failed;
            result.ErrorMessage = ex.Message + ex.StackTrace;
        }
        finally
        {
            // Always remove the temporary .feature copy.
            System.IO.File.Delete(target);
        }
        frameworkHandle.RecordResult(result);
    }
}
// Runs a single Node.js test case in an external node process and, when
// debugging, attaches the VS debugger to it.
// NOTE(review): comments added only; code is unchanged, except that a
// source comment split across a chunk boundary is rejoined. The method's
// closing brace lies beyond this chunk.
private void RunTestCase(VisualStudioApp app, IFrameworkHandle frameworkHandle, IRunContext runContext, TestCase test, Dictionary<string, NodejsProjectSettings> sourceToSettings)
{
    var testResult = new TestResult(test);
    frameworkHandle.RecordStart(test);
    testResult.StartTime = DateTimeOffset.Now;
    NodejsProjectSettings settings;
    // Settings are cached per source so each project file is only loaded once.
    if (!sourceToSettings.TryGetValue(test.Source, out settings)) {
        sourceToSettings[test.Source] = settings = LoadProjectSettings(test.Source);
    }
    if (settings == null) {
        // No usable settings: report the failure and end the case immediately.
        frameworkHandle.SendMessage(
            TestMessageLevel.Error,
            "Unable to determine interpreter to use for " + test.Source);
        RecordEnd(
            frameworkHandle,
            test,
            testResult,
            null,
            "Unable to determine interpreter to use for " + test.Source,
            TestOutcome.Failed);
        return;
    }
    NodejsTestInfo testInfo = new NodejsTestInfo(test.FullyQualifiedName);
    List<string> args = new List<string>();
    int port = 0;
    if (runContext.IsBeingDebugged && app != null) {
        // Detach any existing debug sessions and add node debug arguments.
        app.GetDTE().Debugger.DetachAll();
        args.AddRange(GetDebugArgs(settings, out port));
    }
    var workingDir = Path.GetDirectoryName(CommonUtils.GetAbsoluteFilePath(settings.WorkingDir, testInfo.ModulePath));
    args.AddRange(GetInterpreterArgs(test, workingDir, settings.ProjectRootDir));
    //Debug.Fail("attach debugger");
    if (!File.Exists(settings.NodeExePath)) {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Interpreter path does not exist: " + settings.NodeExePath);
        return;
    }
    lock (_syncObject) {
        _nodeProcess = ProcessOutput.Run(
            settings.NodeExePath,
            args,
            workingDir,
            null,
            false,
            null,
            false);
#if DEBUG
        frameworkHandle.SendMessage(TestMessageLevel.Informational, "cd " + workingDir);
        frameworkHandle.SendMessage(TestMessageLevel.Informational, _nodeProcess.Arguments);
#endif
        // Give the process a moment to start before attempting to attach.
        _nodeProcess.Wait(TimeSpan.FromMilliseconds(500));
        if (runContext.IsBeingDebugged && app != null) {
            try {
                //the '#ping=0' is a special flag to tell VS node debugger not to connect to the port,
                //because a connection carries the consequence of setting off --debug-brk, and breakpoints will be missed.
                string qualifierUri = string.Format("tcp://localhost:{0}#ping=0", port);
                // Retry the attach until it succeeds or the process exits.
                while (!app.AttachToProcess(_nodeProcess, NodejsRemoteDebugPortSupplierUnsecuredId, qualifierUri)) {
                    if (_nodeProcess.Wait(TimeSpan.FromMilliseconds(500))) {
                        break;
                    }
                }
#if DEBUG
            } catch (COMException ex) {
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Error occurred connecting to debuggee.");
                frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
                KillNodeProcess();
            }
#else
            } catch (COMException) {
                frameworkHandle.SendMessage(TestMessageLevel.Error, "Error occurred connecting to debuggee.");
                KillNodeProcess();
            }
#endif
        }
    }