/// <summary>
/// Starts the gauge runner process, optionally attaches the debugger, and waits
/// for it to exit. On failure, reports an errored result for every known test.
/// </summary>
public void Run()
{
    try
    {
        _frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Invoking : {_gaugeProcess}");
        _gaugeProcess.Start();
        _gaugeProcess.BeginOutputReadLine();

        if (_isBeingDebugged)
        {
            // Attach before waiting so breakpoints in the runner are hit.
            DTEHelper.AttachToProcess(_gaugeProcess.Id);
            _frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Attaching to ProcessID {_gaugeProcess.Id}");
        }

        _gaugeProcess.WaitForExit();
    }
    catch (Exception ex)
    {
        _frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);

        // Report every known test case with the error so none appear to hang.
        foreach (var pending in _tests)
        {
            var failure = new TestResult(pending)
            {
                Outcome = TestOutcome.None,
                ErrorMessage = $"{ex.Message}\n{ex.StackTrace}"
            };
            _frameworkHandle.RecordResult(failure);
            _frameworkHandle.RecordEnd(pending, failure.Outcome);
            _pendingTests.Remove(pending);
        }
    }
}
/// <summary>
/// Translates a Catch2 interface result onto the VSTest result object and
/// records it with the framework handle.
/// </summary>
private void RecordTestResult(TestResult result, Catch2Interface.TestResult interfaceresult)
{
    LogDebug(TestMessageLevel.Informational, $"Testcase result for: {result.TestCase.DisplayName}");

    switch (interfaceresult.Outcome)
    {
        case Catch2Interface.TestOutcomes.Timedout:
            // Timed-out tests are surfaced as skipped, with their captured output attached.
            LogVerbose(TestMessageLevel.Warning, "Time out");
            result.Outcome = TestOutcome.Skipped;
            result.ErrorMessage = interfaceresult.ErrorMessage;
            result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, interfaceresult.StandardOut));
            result.Duration = interfaceresult.Duration;
            break;

        case Catch2Interface.TestOutcomes.Cancelled:
            result.Outcome = TestOutcome.None;
            break;

        case Catch2Interface.TestOutcomes.Skipped:
            result.Outcome = TestOutcome.Skipped;
            result.ErrorMessage = interfaceresult.ErrorMessage;
            break;

        default:
            // Passed or Failed: record full details and any non-empty output streams.
            result.Outcome = interfaceresult.Outcome == Catch2Interface.TestOutcomes.Passed
                ? TestOutcome.Passed
                : TestOutcome.Failed;
            result.Duration = interfaceresult.Duration;
            result.ErrorMessage = interfaceresult.ErrorMessage;
            result.ErrorStackTrace = interfaceresult.ErrorStackTrace;

            if (!string.IsNullOrEmpty(interfaceresult.StandardOut))
            {
                result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, interfaceresult.StandardOut));
            }
            if (!string.IsNullOrEmpty(interfaceresult.StandardError))
            {
                result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, interfaceresult.StandardError));
            }
            if (!string.IsNullOrEmpty(interfaceresult.AdditionalInfo))
            {
                result.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, interfaceresult.AdditionalInfo));
            }
            break;
    }

    _frameworkHandle.RecordResult(result);
    LogVerbose(TestMessageLevel.Informational, $"Finished test: {result.TestCase.FullyQualifiedName}");
}
/// <summary> /// Run tests one test at a time and update results back to framework. /// </summary>
/// <param name="testBatches">List of test batches to run</param>
/// <param name="runContext">Solution properties</param>
/// <param name="frameworkHandle">Unit test framework handle</param>
private void RunBoostTests(IEnumerable <TestRun> testBatches, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(runContext);

    foreach (TestRun batch in testBatches)
    {
        // Stop scheduling further batches once cancellation has been requested.
        if (_cancelled)
        {
            break;
        }

        // Wall-clock start; also stamped onto timeout results below.
        DateTimeOffset start = new DateTimeOffset(DateTime.Now);

        try
        {
            Logger.Info("{0}: -> [{1}]", ((runContext.IsBeingDebugged) ? "Debugging" : "Executing"), string.Join(", ", batch.Tests));

            // The runner's output artifacts are wrapped in TemporaryFile so they
            // are cleaned up when this scope exits.
            using (TemporaryFile report = new TemporaryFile(batch.Arguments.ReportFile))
            using (TemporaryFile log = new TemporaryFile(batch.Arguments.LogFile))
            using (TemporaryFile stdout = new TemporaryFile(batch.Arguments.StandardOutFile))
            using (TemporaryFile stderr = new TemporaryFile(batch.Arguments.StandardErrorFile))
            {
                Logger.Debug("Working directory: {0}", batch.Arguments.WorkingDirectory ?? "(null)");
                Logger.Debug("Report file : {0}", batch.Arguments.ReportFile);
                Logger.Debug("Log file : {0}", batch.Arguments.LogFile);
                Logger.Debug("StdOut file : {0}", batch.Arguments.StandardOutFile ?? "(null)");
                Logger.Debug("StdErr file : {0}", batch.Arguments.StandardErrorFile ?? "(null)");

                // Execute the tests
                if (ExecuteTests(batch, runContext, frameworkHandle))
                {
                    foreach (VSTestResult result in GenerateTestResults(batch, start, settings))
                    {
                        // Identify test result to Visual Studio Test framework
                        frameworkHandle.RecordResult(result);
                    }
                }
            }
        }
        catch (Boost.Runner.TimeoutException ex)
        {
            // Batch timed out: every test in it is reported as a timeout result
            // stamped with the batch start time.
            foreach (VSTestCase testCase in batch.Tests)
            {
                VSTestResult testResult = GenerateTimeoutResult(testCase, ex);
                testResult.StartTime = start;
                frameworkHandle.RecordResult(testResult);
            }
        }
        catch (Exception ex)
        {
            // Any other failure is logged; the batch's tests remain unreported.
            Logger.Exception(ex, "Exception caught while running test batch {0} [{1}] ({2})", batch.Source, string.Join(", ", batch.Tests), ex.Message);
        }
    }
}
/// <summary>
/// Records a failed result for the given test method, carrying the exception's
/// message and stack trace.
/// </summary>
public void WriteTestFailure(MethodInfo m, Exception ex)
{
    var testCase = new TestCase(m.Name, _uri, m.DeclaringType.Assembly.Location);
    var failure = new VsTestResult(testCase)
    {
        // Underscores in the method name render as spaces in the test explorer.
        DisplayName = m.Name.Replace("_", " "),
        Outcome = TestOutcome.Failed,
        ErrorMessage = ex.Message,
        ErrorStackTrace = ex.StackTrace
    };
    _handle.RecordResult(failure);
}
/// <summary>
/// Executes a single test method via the runner and records start, end and
/// result with the framework handle. Exceptions are reported as failures.
/// </summary>
private static void RunTestMethod(IFrameworkHandle frameworkHandle, TestCase testCase, TestRunner runner, TestMethod method)
{
    frameworkHandle.RecordStart(testCase);
    try
    {
        var result = runner.Run(method.Owner, method).GetAwaiter().GetResult();
        if (result == null)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Warning, "Got no result");
            return;
        }

        // Map the runner's outcome onto the VSTest outcome.
        var outcome = result.IsIgnored
            ? TestOutcome.Skipped
            : result.IsSuccess ? TestOutcome.Passed : TestOutcome.Failed;

        var msResult = new TestResult(testCase)
        {
            StartTime = result.StartedAtUtc,
            EndTime = result.EndedAtUtc,
            DisplayName = method.Name.Replace("_", " "),
            Outcome = outcome,
            Duration = result.Elapsed,
            ErrorMessage = result.Exception?.Message,
            ErrorStackTrace = result.Exception?.StackTrace
        };

        frameworkHandle.RecordEnd(testCase, msResult.Outcome);
        frameworkHandle.RecordResult(msResult);
    }
    catch (Exception ex)
    {
        // The runner itself blew up: report the test as failed with the details.
        frameworkHandle.RecordEnd(testCase, TestOutcome.Failed);
        frameworkHandle.RecordResult(new TestResult(testCase)
        {
            DisplayName = method.Name.Replace("_", " "),
            Outcome = TestOutcome.Failed,
            ErrorMessage = ex.Message,
            ErrorStackTrace = ex.StackTrace
        });
    }
}
/// <summary>
/// Reports a passing result for the supplied case to the test framework.
/// </summary>
public void CasePassed(PassResult result)
{
    var testCase = new TestCase(result.Case.Name, new Uri(Constants.EXECUTOR_URI_STRING), source);
    var passed = new TestResult(testCase) { Outcome = TestOutcome.Passed };

    frameworkHandle.RecordStart(testCase);
    frameworkHandle.RecordEnd(testCase, TestOutcome.Passed);
    frameworkHandle.RecordResult(passed);
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="tests">Which tests should be run.</param>
/// <param name="context">Context in which to run tests.</param>
/// <param name="framework">Where results should be stored.</param>
public void RunTests(IEnumerable <TestCase> tests, IRunContext context, IFrameworkHandle framework)
{
    _state = ExecutorState.Running;
    foreach (var test in tests)
    {
        // Honour a pending cancellation request before starting the next test.
        if (_state == ExecutorState.Cancelling)
        {
            _state = ExecutorState.Cancelled;
            return;
        }
        try
        {
            // Runs (or debugs) the single Catch test, then parses its report
            // into a VSTest result.
            var reportDocument = RunOrDebugCatchTest(test.Source, test.FullyQualifiedName, context, framework);
            var result = GetTestResultFromReport(test, reportDocument);
            framework.RecordResult(result);
        }
        catch (Exception ex)
        {
            // Log it and move on. It will show up to the user as a test that hasn't been run.
            framework.SendMessage(TestMessageLevel.Error, "Exception occured when processing test case: " + test.FullyQualifiedName);
            framework.SendMessage(TestMessageLevel.Informational, "Message: " + ex.Message + "\nStacktrace:" + ex.StackTrace);
        }
    }
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="tests">Tests to be run.</param>
/// <param name="runContext">Context to use when executing the tests.</param>
/// <param name="frameworkHandle">Handle to the framework to record results and to do framework operations.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    m_cancelled = false;
    try
    {
        foreach (TestCase test in tests)
        {
            if (m_cancelled)
            {
                break;
            }
            frameworkHandle.RecordStart(test);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Starting external test for " + test.DisplayName);
            var testOutcome = RunExternalTest(test, runContext, frameworkHandle, test);
            frameworkHandle.RecordResult(testOutcome);
            // BUG FIX: RecordStart was never paired with RecordEnd, leaving the
            // test platform thinking the test was still in progress.
            frameworkHandle.RecordEnd(test, testOutcome.Outcome);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Test result:" + testOutcome.Outcome.ToString());
        }
    }
    catch (Exception e)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Exception during test execution: " + e.Message);
    }
}
/// <summary>
/// Creates and executes a test run for the collection, recording each completed
/// test with the framework as it finishes.
/// </summary>
/// <param name="testCollection">Collection of tests to execute.</param>
/// <param name="frameworkHandle">Framework handle used to record results.</param>
/// <param name="testMapping">Maps runner tests back to their VSTest cases.</param>
private void ExecuteTestRun(TestCollection testCollection, IFrameworkHandle frameworkHandle, Dictionary<Test, TestCase> testMapping)
{
    try
    {
        _testRun = testCollection.CreateTestRun();
        _testRun.TestCompleted += result =>
        {
            var vsResult = new VisualStudioTestResult(testMapping[result.Test])
            {
                Outcome = OutcomeFromResult(result.Result),
                Duration = result.Duration,
                ErrorMessage = result.Output,
                // BUG FIX: ErrorStackTrace was previously assigned twice
                // (initializer plus a redundant follow-up statement).
                ErrorStackTrace = GetCombinedStackTrace(result.Exception)
            };
            vsResult.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, result.Output));
            frameworkHandle.RecordResult(vsResult);
        };
        _testRun.ExecuteAsync().GetAwaiter().GetResult();
    }
    catch (Exception ex)
    {
        // BUG FIX: the crash log used to be written to a hard-coded developer
        // desktop path; use the machine's temp directory so it works anywhere.
        File.WriteAllText(Path.Combine(Path.GetTempPath(), "jazsharp.testadapter.log"), ex.ToString());
        throw;
    }
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="tests">Tests to be run.</param>
/// <param name="runContext">Context to use when executing the tests.</param>
/// <param name="frameworkHandle">Handle to the framework to record results and to do framework operations.</param>
public void RunTests(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    //if (Debugger.IsAttached) Debugger.Break();
    //else Debugger.Launch();

    // Best-effort load of adapter-specific settings from the run settings XML;
    // on any failure the error is logged and defaults are used.
    try
    {
        var parsed = XElement.Parse(runContext.RunSettings.SettingsXml);
        runContext.RunSettings.GetSettings(AppConfig.Name).Load(parsed.Element(AppConfig.Name).CreateReader());
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Framework: Error while loading SettingsXml - {ex.Message} {ex.Data}");
    }

    m_cancelled = false;
    try
    {
        foreach (TestCase test in tests)
        {
            // Stop scheduling further tests once Cancel() has been requested.
            if (m_cancelled)
            {
                break;
            }
            frameworkHandle.RecordStart(test);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Framework: Starting external test for " + test.DisplayName);
            var testOutcome = RunExternalTest(test, runContext, frameworkHandle, test);
            frameworkHandle.RecordResult(testOutcome);
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "Framework: Test result:" + testOutcome.Outcome.ToString());
            frameworkHandle.RecordEnd(test, testOutcome.Outcome);
        }
    }
    catch (Exception e)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Framework: Exception during test execution: " + e.Message);
        frameworkHandle.SendMessage(TestMessageLevel.Error, "Framework: " + e.StackTrace);
    }
}
/// <summary>
/// Runs each test in its own engine instance and records pass/fail based on the
/// first kernel test result, attaching any captured output messages.
/// </summary>
/// <param name="tests">Tests to execute.</param>
/// <param name="runContext">Run context used to build the engine configuration.</param>
/// <param name="frameworkHandle">Framework handle used to record results.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    _cancellationTokenSource = new CancellationTokenSource();
    foreach (var test in tests)
    {
        var configuration = new EngineConfiguration(new string[] { test.Source }, runContext);
        var testEngine = new Engine(configuration);
        var outputHandler = new TestAdapterOutputHandler(frameworkHandle);
        testEngine.SetOutputHandler(outputHandler);

        var testResult = new TestResult(test);
        frameworkHandle.RecordStart(test);

        var kernelTestResult = testEngine.Execute(_cancellationTokenSource.Token).KernelTestResults[0];
        testResult.Outcome = kernelTestResult.Result ? TestOutcome.Passed : TestOutcome.Failed;

        // BUG FIX: the output messages were collected into a local collection
        // that was never attached to the result, so they never reached the
        // test explorer. Attach them to the result directly.
        foreach (var message in outputHandler.Messages)
        {
            testResult.Messages.Add(new TestResultMessage(String.Empty, message));
        }

        frameworkHandle.RecordEnd(test, testResult.Outcome);
        frameworkHandle.RecordResult(testResult);
    }
}
/// <summary>
/// runs a separate ctest call for every testcase
///
/// @maybe use -I to run all test cases
/// @todo add more metadata to tests!
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Run context (unused here).</param>
/// <param name="frameworkHandle">Framework handle used to record results.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    foreach (var test in tests)
    {
        if (cancelled)
        {
            break;
        }

        CTestCase ctest = test;
        // BUG FIX: the Process instance was never disposed, leaking handles;
        // wrap it in using so it is always cleaned up.
        using (var process = new Process())
        {
            process.StartInfo = new ProcessStartInfo()
            {
                // -I "n,n,,n" selects exactly test number n.
                Arguments = "-I \"" + ctest.Number + "," + ctest.Number + ",," + ctest.Number + "\"",
                FileName = "ctest",
                WorkingDirectory = ctest.CMakeBinaryDir,
                CreateNoWindow = true,
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                UseShellExecute = false,
                WindowStyle = ProcessWindowStyle.Hidden
            };
            process.Start();
            var output = process.StandardOutput.ReadToEnd();
            // BUG FIX: ExitCode was read without waiting for the process to
            // fully exit, which can throw InvalidOperationException.
            process.WaitForExit();
            var exitCode = process.ExitCode;

            var testResult = new TestResult(test);
            testResult.Outcome = exitCode == 0 ? TestOutcome.Passed : TestOutcome.Failed;
            frameworkHandle.RecordResult(testResult);
        }
    }
}
/// <summary>
/// Runs the given tests through the reactive test runner, recording each result
/// as it is observed. Errors are reported as framework messages.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // Map framework-agnostic unit tests back to their VSTest cases.
    var testCaseLookup = tests.ToDictionary(test => test.ToUnitTest(), test => test);
    cts = new CancellationTokenSource();
    try
    {
        var resultObservable = TestRunner.RunTests(testCaseLookup.Select(kvp => kvp.Key));
        resultObservable
            .Do(sunitResult =>
            {
                var unitTest = sunitResult.UnitTest;
                var testCase = testCaseLookup[unitTest];
                var vsResult = ConvertToVsResult(testCase, unitTest, sunitResult);
                frameworkHandle.RecordResult(vsResult);
            })
            .ToTask()
            .Wait(cts.Token);
    }
    catch (Exception error)
    {
        frameworkHandle.SendMessage(TestMessageLevel.Error, $"Unexpected {error.GetType().FullName}");
        frameworkHandle.SendMessage(TestMessageLevel.Error, error.Message);
        frameworkHandle.SendMessage(TestMessageLevel.Error, error.StackTrace);
    }
}
/// <summary>
/// Finalizes a test result from the runner's result event — timing, outcome,
/// captured output and traceback — then records it with the framework.
/// </summary>
private static void RecordEnd(IFrameworkHandle frameworkHandle, TestResult result, string stdout, string stderr, TestOutcome outcome, TP.ResultEvent resultInfo)
{
    result.EndTime = DateTimeOffset.Now;
    result.Duration = TimeSpan.FromSeconds(resultInfo.durationInSecs);
    result.Outcome = outcome;

    // Replace \n with \r\n to be more friendly when copying output...
    var normalizedOut = stdout.Replace("\r\n", "\n").Replace("\n", "\r\n");
    var normalizedErr = stderr.Replace("\r\n", "\n").Replace("\n", "\r\n");

    result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, normalizedOut));
    result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, normalizedErr));
    // stderr is additionally surfaced under AdditionalInfo (original behavior).
    result.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, normalizedErr));

    if (resultInfo.traceback != null)
    {
        result.ErrorStackTrace = resultInfo.traceback;
        result.Messages.Add(new TestResultMessage(TestResultMessage.DebugTraceCategory, resultInfo.traceback));
    }
    if (resultInfo.message != null)
    {
        result.ErrorMessage = resultInfo.message;
    }

    frameworkHandle.RecordResult(result);
    frameworkHandle.RecordEnd(result.TestCase, outcome);
}
/// <summary>
/// Runs each test sequentially, recording start and result, until done or cancelled.
/// </summary>
private void RunTests(IEnumerable <TestCase> tests)
{
    _executor.InitTestRuns();
    foreach (var testCase in tests)
    {
        if (_cancelled)
        {
            break;
        }
        _frameworkHandle.RecordStart(testCase);
        _frameworkHandle.RecordResult(RunTest(testCase));
    }
}
/// <summary>
/// Maps a finished test's shim result onto a VSTest result and records it,
/// including failure details or the skip reason where applicable.
/// </summary>
public void NotifyEnd(string testFullName, TestResultShim testResult)
{
    var test = _tests[testFullName];
    var result = new TestResult(test)
    {
        Outcome = MapToOutcome(testResult),
        DisplayName = testFullName
    };

    switch (result.Outcome)
    {
        case TestOutcome.Failed:
            result.ErrorMessage = testResult.FailureReason;
            result.ErrorStackTrace = testResult.FailureStackTrace;
            break;
        case TestOutcome.Skipped:
            // TODO: can we include the reason skipped in VS output somehow?
            result.Messages.Add(new TestResultMessage("ReasonSkipped", testResult.ReasonSkipped));
            break;
    }

    _frameworkHandle.RecordEnd(test, result.Outcome);
    _frameworkHandle.RecordResult(result);
}
/// <summary>
/// Executes a pytest test group via the executor service and reports each
/// parsed result, stopping early if cancellation is requested.
/// </summary>
private void RunTestGroup(
    IGrouping<PythonProjectSettings, TestCase> testGroup,
    IRunContext runContext,
    IFrameworkHandle frameworkHandle
)
{
    var settings = testGroup.Key;
    // Only pytest-configured projects are handled by this path.
    if (settings == null || settings.TestFramework != TestFrameworkType.Pytest)
    {
        return;
    }

    var testConfig = new PytestConfiguration(runContext);
    using (var executor = new ExecutorService(testConfig, settings, frameworkHandle, runContext))
    {
        executor.Run(testGroup, _cancelRequested);
    }

    foreach (var result in ParseResults(testConfig.ResultsXmlPath, testGroup, frameworkHandle))
    {
        // Stop reporting as soon as cancellation is signalled.
        if (_cancelRequested.WaitOne(0))
        {
            break;
        }
        frameworkHandle.RecordResult(result);
    }
}
/// <summary>
/// Runs one test case under the concurrency semaphore: records start, executes
/// the steps, then records end and result.
/// </summary>
private async Task Run(TestCase testCase, DiscoveredTestData testData, TestRunContext testRunContext, StepBinder stepBinder, IFrameworkHandle frameworkHandle, SemaphoreSlim simultaneousTestCasesSemaphore)
{
    // Throttle how many test cases execute concurrently.
    await simultaneousTestCasesSemaphore.WaitAsync().ConfigureAwait(false);
    try
    {
        frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Starting test \"{testCase.DisplayName}\"");
        frameworkHandle.RecordStart(testCase);

        var executor = stepsExecutorFactory(stepBinder);
        var testResult = await executor
            .Execute(testCase, testData, testRunContext, frameworkHandle)
            .ConfigureAwait(false);

        // Per vstest's TestExecutionRecorder, RecordEnd should be called just
        // before RecordResult.
        frameworkHandle.RecordEnd(testCase, testResult.Outcome);
        frameworkHandle.RecordResult(testResult);

        frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Finished test \"{testCase.DisplayName}\"");
    }
    finally
    {
        simultaneousTestCasesSemaphore.Release();
    }
}
/// <summary>
/// Runs each test case that matches the FullyQualifiedName filter and records
/// start, end and result with the framework.
/// </summary>
/// <param name="tests">Test cases to run.</param>
/// <param name="runContext">Run context supplying the test case filter.</param>
/// <param name="frameworkHandle">Framework handle used to record results.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // more on filtering
    // https://github.com/nunit/nunit3-vs-adapter/blob/master/src/NUnitTestAdapter/VsTestFilter.cs
    List<string> supportedProperties = new List<string>();
    supportedProperties.Add("FullyQualifiedName");
    ITestCaseFilterExpression fe = runContext.GetTestCaseFilter(supportedProperties, PropertyProvider);

    log.Debug("Run settings:\n" + runContext.RunSettings.SettingsXml);
    log.Debug("RunTests from Test Cases");
    foreach (TestCase tc in tests)
    {
        if (fe == null || fe.MatchTestCase(tc, p => PropertyValueProvider(tc, p)))
        {
            log.Debug("Run test case: " + tc.FullyQualifiedName + " / " + tc.Id);
            frameworkHandle.RecordStart(tc);
            // BUG FIX: duration was measured by subtracting two DateTime.Now
            // readings, which is sensitive to clock adjustments (DST, NTP) and
            // can yield skewed or negative durations. Use a monotonic Stopwatch.
            var stopwatch = Stopwatch.StartNew();
            TestResult tr = runner.RunTest(tc, runContext);
            stopwatch.Stop();
            tr.Duration = stopwatch.Elapsed;
            frameworkHandle.RecordEnd(tc, tr.Outcome);
            frameworkHandle.RecordResult(tr);
        }
        else
        {
            log.Debug("Test case filtered out: " + tc.FullyQualifiedName + " / " + tc.Id);
        }
    }
}
/// <summary>
/// Discovers tests in the group's source assembly, runs each matching method
/// through a shared runner, and records NotFound for any case without a method.
/// </summary>
private static void RunLocally(IFrameworkHandle frameworkHandle, IGrouping<string, TestCase> source)
{
    var discoverer = new TestDiscoverer();
    var discovered = discoverer.LoadFromSources(new[] { source.Key }, x => frameworkHandle.SendMessage(TestMessageLevel.Informational, x));
    if (!discovered.Any())
    {
        return;
    }

    var runner = new TestRunner(discoverer);
    runner.Load(new[] { discovered[0].Type.Assembly }).GetAwaiter().GetResult();

    foreach (var testCase in source)
    {
        var testClassName = (string)testCase.GetPropertyValue(Constants.TestClassProperty);
        var testName = (string)testCase.GetPropertyValue(Constants.TestMethodProperty);

        var method = discoverer.Get(testClassName, testName);
        if (method == null)
        {
            // No matching method was discovered: surface as NotFound rather
            // than silently skipping the case.
            frameworkHandle.RecordResult(new TestResult(testCase) { Outcome = TestOutcome.NotFound });
            continue;
        }

        RunTestMethod(frameworkHandle, testCase, runner, method);
    }

    runner.Shutdown().GetAwaiter().GetResult();
}
/// <summary>
/// Runs the tests.
/// </summary>
/// <param name="testBinaries">Where to look for tests to be run.</param>
/// <param name="context">Context in which to run tests.</param>
/// <param name="framework">Where results should be stored.</param>
public void RunTests(IEnumerable<string> testBinaries, IRunContext context, IFrameworkHandle framework)
{
    _state = ExecutorState.Running;
    foreach (var testBinary in testBinaries)
    {
        // Honour a pending cancellation request before starting the next binary.
        if (_state == ExecutorState.Cancelling)
        {
            _state = ExecutorState.Cancelled;
            return;
        }

        // Run the whole binary once ("*" selects every test); individual results
        // are then extracted from the single report document per test.
        var reportDocument = RunOrDebugCatchTest(testBinary, "*", context, framework);
        var tests = CatchTestDiscoverer.ListTestsInBinary(testBinary);
        foreach (var test in tests)
        {
            try
            {
                var result = GetTestResultFromReport(test, reportDocument, framework);
                framework.RecordResult(result);
            }
            catch (Exception ex)
            {
                // Log it and move on. It will show up to the user as a test that hasn't been run.
                framework.SendMessage(TestMessageLevel.Error, "Exception occured when processing test source: " + test.FullyQualifiedName);
                framework.SendMessage(TestMessageLevel.Informational, "Message: " + ex.Message + "\nStacktrace:" + ex.StackTrace);
            }
        }
    }
}
/// <summary>
/// Runs a pytest test group: executes the group (optionally under code
/// coverage), parses the JUnit-style results XML, and records one result per
/// test — defaulting to Skipped for tests absent from the XML.
/// </summary>
private void RunTestGroup(
    IGrouping <PythonProjectSettings, TestCase> testGroup,
    IRunContext runContext,
    IFrameworkHandle frameworkHandle
)
{
    PythonProjectSettings settings = testGroup.Key;
    // Only pytest-configured projects are handled here.
    if (settings == null || settings.TestFramework != TestFrameworkType.Pytest)
    {
        return;
    }

    using (var executor = new ExecutorService(settings, frameworkHandle, runContext))
    {
        bool codeCoverage = ExecutorService.EnableCodeCoverage(runContext);
        string covPath = null;
        if (codeCoverage)
        {
            covPath = ExecutorService.GetCoveragePath(testGroup);
        }

        var resultsXML = executor.Run(testGroup, covPath, _cancelRequested);

        // Default TestResults
        // Pre-seed every test as Skipped, keyed by its pytest id property, so
        // tests missing from the results XML are still reported.
        var pytestIdToResultsMap = testGroup.Select(tc => new TestResult(tc) { Outcome = TestOutcome.Skipped })
            .ToDictionary(tr => tr.TestCase.GetPropertyValue <string>(Pytest.Constants.PytestIdProperty, String.Empty), tr => tr);

        if (File.Exists(resultsXML))
        {
            try
            {
                // Overwrite the seeded entries with actual outcomes from the XML.
                var doc = JunitXmlTestResultParser.Read(resultsXML);
                Parse(doc, pytestIdToResultsMap, frameworkHandle);
            }
            catch (Exception ex)
            {
                frameworkHandle.SendMessage(TestMessageLevel.Error, ex.Message);
            }
        }
        else
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, Strings.PytestResultsXmlNotFound.FormatUI(resultsXML));
        }

        foreach (var result in pytestIdToResultsMap.Values)
        {
            // Stop reporting once cancellation is requested.
            if (_cancelRequested.WaitOne(0))
            {
                break;
            }
            frameworkHandle.RecordResult(result);
        }

        if (codeCoverage)
        {
            ExecutorService.AttachCoverageResults(frameworkHandle, covPath);
        }
    }
}
/// <summary>
/// Records a failed result for this test case with the given error details.
/// </summary>
public static void MarkAsFailed(this TestCase testCase, IFrameworkHandle frameworkHandle, string errorMessage, string errorStackTrace)
{
    var failure = new TestResult(testCase)
    {
        Outcome = TestOutcome.Failed,
        ErrorMessage = errorMessage,
        ErrorStackTrace = errorStackTrace
    };
    frameworkHandle.RecordResult(failure);
}
/// <summary>
/// Runs tests grouped by source assembly, each group through a cross-domain
/// Cone proxy; results come back via the sink's events and are recorded as
/// they arrive.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // One group (and one proxy invocation) per source assembly.
    foreach(var source in tests.GroupBy(x => x.Source, x => x.FullyQualifiedName))
    {
        var xDomainSink = new TestAdapterLogger(frameworkHandle, source.Key);
        // Relay the sink's cross-domain callbacks into vstest results.
        xDomainSink.OnSuccess += (_, e) => frameworkHandle.RecordResult(new TestResult(e.TestCase) {
            Outcome = TestOutcome.Passed,
            Duration = e.Duration,
        });
        xDomainSink.OnPending += (_, e) => frameworkHandle.RecordResult(new TestResult(e.TestCase) {
            Outcome = TestOutcome.Skipped,
            Duration = e.Duration
        });
        xDomainSink.OnFailure += (_, e) => frameworkHandle.RecordResult(new TestResult(e.TestCase) {
            Outcome = TestOutcome.Failed,
            Duration = e.Duration,
            ErrorMessage = e.ErrorMessage,
            ErrorStackTrace = e.ErrorStackTrace,
        });
        // Execute the group's tests inside a proxy hosted in another app domain.
        CrossDomainConeRunner.WithProxyInDomain<ConeTestAdapterProxy, int>(string.Empty, new [] { source.Key, }, proxy => proxy.RunTests(source.Key, xDomainSink, source.ToArray()));
    }
}
/// <summary>
/// Converts each external test result for the source into a VSTest result and
/// records start, result and end for it.
/// </summary>
private void RunTests(string source, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    var results = ExternalTestExecutor.GetTestResults(source, null).Select(c => CreateTestResult(source, c));
    foreach (var result in results)
    {
        frameworkHandle.RecordStart(result.TestCase);
        frameworkHandle.RecordResult(result);
        frameworkHandle.RecordEnd(result.TestCase, result.Outcome);
    }
}
/// <summary>
/// Records a converted test result, routing it through the throttle when
/// result reporting needs to be rate-limited.
/// </summary>
private void ReportTestResult(TestResult testResult)
{
    VsTestResult result = testResult.ToVsTestResult();
    if (TestReportingNeedsToBeThrottled())
    {
        // This is part of a workaround for a Visual Studio bug. See above.
        _throttle.Execute(() => _frameworkHandle.RecordResult(result));
    }
    else
    {
        _frameworkHandle.RecordResult(result);
    }
    _frameworkHandle.RecordEnd(result.TestCase, result.Outcome);
}
/// <summary>
/// Records the finished test's result and then signals its end to the framework.
/// </summary>
public override void TestFinished(TestCase test)
{
    var vsTestCase = test.ToVsTestCase();
    var vsResult = test.ToVsTestResult();
    var vsOutcome = ChutzpahExtensionMethods.ToVsTestOutcome(test.Passed);

    frameworkHandle.RecordResult(vsResult);
    // The test case is done
    frameworkHandle.RecordEnd(vsTestCase, vsOutcome);
}
/// <summary>
/// Resolves the matching discovered test case and records the given result
/// (converted to the vstest object model) against it.
/// </summary>
public void AddResult(TestResult testResult)
{
    var testCase = FindTestCase(testResult.TestCase.FullyQualifiedName);
    var vsResult = new Microsoft.VisualStudio.TestPlatform.ObjectModel.TestResult(testCase)
    {
        ErrorStackTrace = testResult.ErrorStackTrace,
        ErrorMessage = testResult.ErrorMessage,
        Outcome = ConvertOutcome(testResult.Result),
        Duration = testResult.Duration
    };
    _frameworkHandle.RecordResult(vsResult);
}
/// <summary>
/// Invokes the context's test method via reflection and records the outcome:
/// Skipped when cancelled, Passed on success, Failed when the method throws.
/// The result (with duration) is always recorded via the finally block.
/// </summary>
/// <param name="testRunnerContext">Carries the test case, target method/instance and framework handle.</param>
public void Run(TestRunnerContext testRunnerContext)
{
    var testCase = testRunnerContext.TestCase;
    var testResult = new TestResult(testCase);
    IFrameworkHandle frameworkHandle = testRunnerContext.FrameworkHandle;

    if (testRunnerContext.TestCancelled)
    {
        testResult.Outcome = TestOutcome.Skipped;
        frameworkHandle.RecordResult(testResult);
        return;
    }

    var stopwatch = Stopwatch.StartNew();
    try
    {
        testRunnerContext.Method.Invoke(testRunnerContext.Instance, null);
        testResult.Outcome = TestOutcome.Passed;
    }
    catch (TargetInvocationException e)
    {
        // BUG FIX: removed a stray Console.WriteLine("OOOPS") debug leftover.
        // Invoke wraps the test's exception; surface the inner message.
        testResult.ErrorMessage = e.InnerException?.Message;
        testResult.Outcome = TestOutcome.Failed;
    }
    finally
    {
        stopwatch.Stop();
        testResult.Duration = stopwatch.Elapsed;
        frameworkHandle.RecordResult(testResult);
        frameworkHandle.RecordEnd(testCase, testResult.Outcome);
    }
}
/// <summary>
/// Runs tSQLt tests against the database named by the
/// "TestDatabaseConnectionString" run setting; without that setting an
/// explanatory error (including a sample .runsettings) is sent and nothing runs.
/// </summary>
public void RunTests(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    m_cancelled = false;
    var connectionString = new RunSettings(runContext.RunSettings).GetSetting("TestDatabaseConnectionString");
    if (String.IsNullOrEmpty(connectionString))
    {
        // No way to reach the database: explain how to configure it and bail out.
        frameworkHandle.SendMessage(TestMessageLevel.Error, @"No connection string found. You need to specify a run setting with the name ""TestDatabaseConnectionString"". Create a .runsettings file a sample is: <?xml version=""1.0"" encoding=""utf-8""?> <RunSettings> <!-- Parameters used by tests at runtime --> <TestRunParameters> <Parameter name=""TestDatabaseConnectionString"" value=""server=Servername;initial catalog=UnitTestDatabase;integrated security=sspi"" /> <!-- If you have a large project then to speed up discovery, use this to limit which .sql files are parsed. If all your tests are in a subfolder called \UnitTests\ or \Our-UnitTests\ then set the value to UnitTests - it is a regex so U.i.T.s.s will also work. <!-- <Parameter name=""IncludePath"" value=""RegexToTestsToInclude"" /> --> </TestRunParameters> </RunSettings> If you are running tests in visual studio choose ""Test-->Test Settings-->Select Test Settings File--> Choose your .runsettings file"", if you are using the command line pass the runsettings files using /Settings:PathTo.runsettings ");
        return;
    }
    foreach (TestCase test in tests)
    {
        if (m_cancelled)
        {
            break;
        }
        var testResult = new TestResult(test);
        // NOTE(review): a new runner is created per test case — presumably each
        // opens its own connection; confirm whether reuse would be safe.
        var testSession = new tSQLtTestRunner(connectionString);
        var result = Run(testSession, test);
        // A null result is silently skipped (no outcome is recorded for the case).
        if (null == result)
        {
            continue;
        }
        testResult.Outcome = result.Passed() ? TestOutcome.Passed : TestOutcome.Failed;
        testResult.ErrorMessage += result.FailureMessages();
        frameworkHandle.RecordResult(testResult);
    }
}
/// <summary>
/// Runs the given tests through the executor, converting and recording each
/// result with the framework as soon as it is produced.
/// </summary>
private async Task RunTestsAsync(
    string source,
    IEnumerable <Test> tests,
    IFrameworkHandle frameworkHandle,
    SimpleTestExecutor testExecutor,
    TestResultConverter testResultConverter
)
{
    await testExecutor.RunTestsAsync(tests, (test, result) =>
    {
        var converted = testResultConverter.Convert(source, test, result);
        frameworkHandle.RecordResult(converted);
    });
}
/// <summary>
/// Records a result for each test using the outcome stashed on the test case's
/// properties (presumably populated during discovery — TODO confirm).
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    mCancelled = false;
    foreach (var test in tests)
    {
        if (mCancelled)
        {
            break;
        }
        var outcome = (TestOutcome)test.GetPropertyValue(TestResultProperties.Outcome);
        frameworkHandle.RecordResult(new TestResult(test) { Outcome = outcome });
    }
}
/// <summary>
/// Processes the results.
/// </summary>
/// <param name="tests">The tests.</param>
/// <param name="argument">The argument.</param>
/// <param name="frameworkHandle">The framework handle.</param>
internal void ProcessResults(IEnumerable <TestCase> tests, TestCaseArgument argument, IFrameworkHandle frameworkHandle)
{
    var reader = new TrxSchemaReader(this.logger, tests);
    var testRun = reader.Read(argument.TestRunOptions.ReportOutputPath);
    // Nothing to report when the TRX report could not be read.
    if (testRun == null)
    {
        return;
    }

    foreach (TrxResult result in reader.ProcessStatLightResult(testRun))
    {
        var testResult = result.GetTestResult(this.logger);
        frameworkHandle.RecordResult(testResult);
        frameworkHandle.RecordEnd(result.TestCase, testResult.Outcome);
    }
}
/// <summary>
/// Entry point for run single
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Run context (unused here).</param>
/// <param name="frameworkHandle">Framework handle used to record results.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    var groups = tests
        .GroupBy(x => x.Source, x => x, (x, y) => new { Source = x, TestCases = y })
        .ToList();

    // Execute every source group inside its own sandboxed executor.
    var results = new List<DefinitionSource>();
    foreach (var groupedItem in groups)
    {
        using (var sandbox = new Sandbox<Executor>(groupedItem.Source))
        {
            var targetTypes = groupedItem.TestCases
                .Select(x => new DefinitionSource() { ClassName = new Uri(GetSpecID(x)).Host })
                .ToArray();
            results.AddRange(sandbox.Content.Execute(targetTypes));
        }
    }

    // Join execution results back to their originating test cases by spec id.
    var joinedList =
        from r in results
        join tc in groups.SelectMany(x => x.TestCases) on r.Id equals GetSpecID(tc)
        select new { TestResult = r, TestCase = tc };

    foreach (var resultItem in joinedList)
    {
        var testResult = new TestResult(resultItem.TestCase);
        if (resultItem.TestResult.Enabled)
        {
            testResult.DisplayName = resultItem.TestResult.Description;
            testResult.Outcome = resultItem.TestResult.RanSuccesfully ? TestOutcome.Passed : TestOutcome.Failed;
            testResult.Duration = resultItem.TestResult.EndTime - resultItem.TestResult.StartTime;
            testResult.ErrorStackTrace = resultItem.TestResult.StackTrace;
        }
        else
        {
            testResult.Outcome = TestOutcome.Skipped;
        }
        testResult.ErrorMessage = resultItem.TestResult.ExecutionResult;
        frameworkHandle.RecordResult(testResult);
    }
}
/// <summary>
/// Records a result per test using the outcome and duration stashed on the test
/// case's properties (presumably set during discovery — TODO confirm).
/// </summary>
private void RunTestsWithJasmine(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle, LoggerHelper log)
{
    _canceled = false;
    foreach (var test in tests)
    {
        if (_canceled)
        {
            break;
        }
        var outcome = (TestOutcome)test.GetPropertyValue(TestResultProperties.Outcome);
        var duration = (TimeSpan)test.GetPropertyValue(TestResultProperties.Duration);
        frameworkHandle.RecordResult(new TestResult(test)
        {
            Outcome = outcome,
            Duration = duration
        });
    }
}
/// <summary>
/// Matches Karma results to test cases by display-name suffix and records an
/// outcome for each matched test; unmatched tests are left unreported.
/// </summary>
/// <param name="tests">Test cases selected for the run.</param>
/// <param name="runContext">Run context used to locate the Karma results.</param>
/// <param name="frameworkHandle">Framework handle used to record results.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // Materialize once: the sequence is probed repeatedly inside the loop.
    var results = GetTestResults(runContext).ToList();
    foreach (var test in tests)
    {
        // BUG FIX: EndsWith without a StringComparison is culture-sensitive
        // (CA1310); test names are identifiers, so compare ordinally.
        var result = results.FirstOrDefault(x => x.Name.EndsWith(test.DisplayName, StringComparison.Ordinal));
        if (result != null)
        {
            TestOutcome testOutcome = ResultTestOutcome(result);
            frameworkHandle.RecordResult(new TestResult(test) { Outcome = testOutcome });
        }
    }
}
/// <summary>
/// Runs each selected test file/test by name and records a Passed result for it.
/// </summary>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    _cancelled = false;
    foreach (TestCase test in tests)
    {
        if (_cancelled) break;
        var testResult = new TestResult(test);
        string fileName = test.Source;
        string testName = test.DisplayName;
        RunFileOrTest(frameworkHandle, runContext, fileName, testName);
        // NOTE(review): the outcome is hard-coded to Passed regardless of what
        // RunFileOrTest did; presumably failures are recorded separately inside
        // RunFileOrTest (e.g. via a fail helper) — confirm before relying on this.
        testResult.Outcome = TestOutcome.Passed;
        frameworkHandle.RecordResult(testResult);
    }
}
/// <summary>
/// Builds a synthetic failed result for the named test and records it, logging
/// an informational message first. Used when a real result cannot be produced.
/// </summary>
private static void GenericFailTest(IFrameworkHandle frameworkHandle, string fileName, string testName, string message = null)
{
    var testCase = new TestCase(testName, NodeUnitTestExecutor.ExecutorUri, fileName)
    {
        DisplayName = testName
    };
    var testResult = new TestResult(testCase)
    {
        DisplayName = testName,
        Outcome = TestOutcome.Failed,
        ErrorMessage = message
    };
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Recording Result for " + testCase.DisplayName + " (" + testResult.Outcome.ToString() + ")");
    frameworkHandle.RecordResult(testResult);
}
/// <summary>
/// Run tests one test at a time and update results back to framework.
/// </summary>
/// <param name="testBatches">List of test batches to run</param>
/// <param name="runContext">Solution properties</param>
/// <param name="frameworkHandle">Unit test framework handle</param>
private void RunBoostTests(IEnumerable<TestRun> testBatches, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(runContext);
    foreach (TestRun batch in testBatches)
    {
        // Stop launching new batches once a cancellation request was seen.
        if (_cancelled)
        {
            break;
        }
        DateTimeOffset start = new DateTimeOffset(DateTime.Now);
        try
        {
            Logger.Info("{0}: -> [{1}]", ((runContext.IsBeingDebugged) ? "Debugging" : "Executing"), string.Join(", ", batch.Tests));
            // Wrap all runner output files so they are cleaned up after the batch,
            // whether or not execution succeeds.
            using (TemporaryFile report = new TemporaryFile(batch.Arguments.ReportFile))
            using (TemporaryFile log = new TemporaryFile(batch.Arguments.LogFile))
            using (TemporaryFile stdout = new TemporaryFile(batch.Arguments.StandardOutFile))
            using (TemporaryFile stderr = new TemporaryFile(batch.Arguments.StandardErrorFile))
            {
                Logger.Debug("Working directory: {0}", batch.Arguments.WorkingDirectory ?? "(null)");
                Logger.Debug("Report file      : {0}", batch.Arguments.ReportFile);
                Logger.Debug("Log file         : {0}", batch.Arguments.LogFile);
                Logger.Debug("StdOut file      : {0}", batch.Arguments.StandardOutFile ?? "(null)");
                Logger.Debug("StdErr file      : {0}", batch.Arguments.StandardErrorFile ?? "(null)");
                Logger.Debug("CmdLine arguments: {0}", batch.Arguments.ToString() ?? "(null)");
                // Execute the tests
                if (ExecuteTests(batch, runContext, frameworkHandle))
                {
                    foreach (VSTestResult result in GenerateTestResults(batch, start, settings))
                    {
                        // Identify test result to Visual Studio Test framework
                        frameworkHandle.RecordResult(result);
                    }
                }
            }
        }
        catch (Boost.Runner.TimeoutException ex)
        {
            // Mark every test in the timed-out batch with a timeout result.
            foreach (VSTestCase testCase in batch.Tests)
            {
                VSTestResult testResult = GenerateTimeoutResult(testCase, ex);
                testResult.StartTime = start;
                frameworkHandle.RecordResult(testResult);
            }
        }
        catch (Exception ex)
        {
            // Any other failure is logged and the remaining batches continue.
            Logger.Exception(ex, "Exception caught while running test batch {0} [{1}] ({2})", batch.Source, string.Join(", ", batch.Tests), ex.Message);
        }
    }
}
/// <summary>
/// Runs each cucumber.js scenario by copying the source to a .feature file,
/// invoking the globally installed cucumber CLI under node, and translating
/// the JSON report into a Visual Studio test result.
/// </summary>
/// <param name="tests">Test cases to execute (one scenario per case).</param>
/// <param name="runContext">Run settings; IsBeingDebugged triggers a node debug attach.</param>
/// <param name="frameworkHandle">Handle used for logging and recording results.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    foreach (var test in tests)
    {
        if (this.canceled)
        {
            return;
        }

        var result = new TestResult(test);
        var target = System.IO.Path.ChangeExtension(test.Source, ".feature");
        try
        {
            // Overwrite any stale copy left behind by a previously aborted run;
            // the original Copy overload would throw if the target existed.
            System.IO.File.Copy(test.Source, target, true);
            var appDataPath = Environment.GetEnvironmentVariable("APPDATA");
            var nodePath = System.IO.Path.Combine(appDataPath, "npm");
            var cucumberPath = System.IO.Path.Combine(nodePath, "node_modules\\cucumber\\bin\\cucumber.js");

            System.Diagnostics.ProcessStartInfo procStartInfo = runContext.IsBeingDebugged
                ? new System.Diagnostics.ProcessStartInfo("node", $"--debug=5858 \"{cucumberPath}\" \"{target}:{test.LineNumber}\" -f json")
                : new System.Diagnostics.ProcessStartInfo("node", $"\"{cucumberPath}\" \"{target}:{test.LineNumber}\" -f json");

            // Redirect stdout/stderr so the JSON report can be captured.
            procStartInfo.RedirectStandardOutput = true;
            procStartInfo.RedirectStandardError = true;
            procStartInfo.UseShellExecute = false;
            procStartInfo.CreateNoWindow = true;

            using (var proc = new System.Diagnostics.Process())
            {
                proc.StartInfo = procStartInfo;
                proc.Start();
                if (runContext.IsBeingDebugged)
                {
                    // Attach on the same port node was told to listen on (--debug=5858);
                    // the original attached to 5678, which never matched the debuggee.
                    DteHelpers.DebugAttachToNode(proc.Id, 5858);
                }

                // Drain both pipes BEFORE waiting for exit: with redirected streams the
                // child can block forever on a full pipe buffer if we WaitForExit first.
                // stderr is read asynchronously so neither pipe can fill while the other
                // is being drained.
                var errorTask = proc.StandardError.ReadToEndAsync();
                var output = proc.StandardOutput.ReadToEnd();
                proc.WaitForExit();
                var error = errorTask.Result;

                var features = JArray.Parse(output);
                foreach (var feature in features)
                {
                    frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{feature["keyword"]}: {feature["name"]}");
                    foreach (var element in feature["elements"])
                    {
                        frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{element["keyword"]}: {element["name"]}");
                        frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{element["description"]}");

                        bool passed = true;
                        var duration = 0L;
                        foreach (var step in element["steps"])
                        {
                            var message = $"{step["keyword"]}{step["name"]}";
                            duration = duration + (long)step["result"]["duration"];
                            frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, message);
                            if ((string)step["result"]["status"] == "failed")
                            {
                                result.ErrorMessage = (string)step["result"]["error_message"];
                                frameworkHandle.SendMessage(Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging.TestMessageLevel.Informational, $"{result.ErrorMessage}");
                                passed = false;
                            }
                        }

                        result.Duration = TimeSpan.FromTicks(duration);
                        result.Outcome = passed ? TestOutcome.Passed : TestOutcome.Failed;
                    }
                }
            }
        }
        catch (Exception ex)
        {
            result.Outcome = TestOutcome.Failed;
            result.ErrorMessage = ex.Message + ex.StackTrace;
        }
        finally
        {
            // Remove the temporary .feature copy regardless of outcome.
            System.IO.File.Delete(target);
        }

        frameworkHandle.RecordResult(result);
    }
}
/// <summary>
/// Runs each PowerShell test via the executor registered for its framework
/// (encoded as the "framework||rest" prefix of the fully qualified name).
/// Captures host output, maps the executor's result onto a TestResult, and
/// records it. Unknown frameworks abort the remaining run with an error message.
/// </summary>
/// <param name="tests">Test cases to execute.</param>
/// <param name="runContext">Run context passed through to the executor.</param>
/// <param name="frameworkHandle">Handle used for logging and recording results.</param>
public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    _mCancelled = false;
    SetupExecutionPolicy();
    foreach (var test in tests)
    {
        if (_mCancelled) break;

        // The test framework name is the first "||"-separated segment of the FQN.
        var testFramework = test.FullyQualifiedName.Split(new[] { "||" }, StringSplitOptions.None)[0];
        var executor = _testExecutors.FirstOrDefault(
            m => m.TestFramework.Equals(testFramework, StringComparison.OrdinalIgnoreCase));
        if (executor == null)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Error, String.Format("Unknown test executor: {0}", testFramework));
            return;
        }

        // Default to a failure message; overwritten below when real data arrives.
        var testResult = new TestResult(test);
        testResult.Outcome = TestOutcome.Failed;
        testResult.ErrorMessage = "Unexpected error! Failed to run tests!";

        PowerShellTestResult testResultData = null;
        var testOutput = new StringBuilder();
        try
        {
            var testAdapter = new TestAdapterHost();
            testAdapter.HostUi.OutputString = s => testOutput.Append(s);

            // Dispose the runspace when done; the original leaked one per test.
            using (var runspace = RunspaceFactory.CreateRunspace(testAdapter))
            {
                runspace.Open();
                using (var ps = PowerShell.Create())
                {
                    ps.Runspace = runspace;
                    testResultData = executor.RunTest(ps, test, runContext);
                }
            }
        }
        catch (Exception ex)
        {
            testResult.Outcome = TestOutcome.Failed;
            testResult.ErrorMessage = ex.Message;
            testResult.ErrorStackTrace = ex.StackTrace;
        }

        if (testResultData != null)
        {
            testResult.Outcome = testResultData.Outcome;
            testResult.ErrorMessage = testResultData.ErrorMessage;
            testResult.ErrorStackTrace = testResultData.ErrorStacktrace;
        }

        // Surface whatever the PowerShell host wrote during the run.
        if (testOutput.Length > 0)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, testOutput.ToString());
        }
        frameworkHandle.RecordResult(testResult);
    }
}
/// <summary>
/// Run tests one test at a time and update results back to framework.
/// </summary>
/// <param name="testBatches">List of test batches to run</param>
/// <param name="runContext">Solution properties</param>
/// <param name="frameworkHandle">Unit test framework handle</param>
private void RunBoostTests(IEnumerable<TestRun> testBatches, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    BoostTestAdapterSettings settings = BoostTestAdapterSettingsProvider.GetSettings(runContext);

    foreach (TestRun run in testBatches)
    {
        // Stop launching new batches once cancellation has been requested.
        if (_cancelled)
        {
            break;
        }

        DateTimeOffset batchStart = new DateTimeOffset(DateTime.Now);
        try
        {
            string mode = runContext.IsBeingDebugged ? "Debugging" : "Executing";
            Logger.Info("{0}: -> [{1}]", mode, string.Join(", ", run.Tests));

            CleanOutput(run.Arguments);

            // Execute the tests
            if (ExecuteTests(run, runContext, frameworkHandle))
            {
                foreach (VSTestResult vsResult in GenerateTestResults(run, batchStart, settings))
                {
                    // Identify test result to Visual Studio Test framework
                    frameworkHandle.RecordResult(vsResult);
                }
            }
        }
        catch (Boost.Runner.TimeoutException ex)
        {
            // Every test in the batch is reported as timed out.
            foreach (VSTestCase timedOutCase in run.Tests)
            {
                VSTestResult timeoutResult = GenerateTimeoutResult(timedOutCase, ex);
                timeoutResult.StartTime = batchStart;
                frameworkHandle.RecordResult(timeoutResult);
            }
        }
        catch (Exception ex)
        {
            Logger.Error("Exception caught while running test batch {0} [{1}] ({2})", run.Source, string.Join(", ", run.Tests), ex.Message);
        }
    }
}
/// <summary>
/// Runs TypeScript tests in parallel: each test's source file is compiled to JS
/// once (guarded by a lock and memoized in <c>cache</c>), wrapped in a generated
/// phantom driver script that instantiates the test class and calls the test
/// method, and executed via JSRunner. Temporary files are best-effort deleted.
/// </summary>
public void RunTests(IEnumerable<Microsoft.VisualStudio.TestPlatform.ObjectModel.TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
{
    // Guards the compilation cache against concurrent Parallel.ForEach bodies.
    var olock = new Object();
    // Maps a TypeScript source path -> its compiled JS output path.
    var cache = new Dictionary<string, string>();
    Parallel.ForEach(tests, test =>
    {
        var result = new TestResult(test);
        // full path to temporary file
        string filePath = Path.ChangeExtension(Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N") + DateTime.Now.ToString("HH-mm-ss-fff")), ".js");
        try
        {
            // Compile each source file only once; later tests reuse the cached output.
            lock (olock)
            {
                if (!cache.ContainsKey(test.CodeFilePath))
                {
                    TypeScriptCompiler.Compile(test.CodeFilePath, new TypeScriptCompiler.Options(outPath: filePath));
                    cache.Add(test.CodeFilePath, filePath);
                }
                else
                {
                    filePath = cache[test.CodeFilePath];
                }
            }
            var testResult = new JSRunner.TestResult();
            // Per-test driver script: compiled code + "new Class().method()" wrapped
            // in try/catch so phantom exits 0 on success and -1 on failure.
            var scriptFilePath = filePath + Guid.NewGuid().ToString("N") + "exec.js";
            using (var fs = new FileStream(scriptFilePath, FileMode.Create))
            {
                using (var sw = new StreamWriter(fs))
                {
                    sw.WriteLine("try{");
                    sw.Write(File.ReadAllText(filePath));
                    // FQN is "Namespace.Class.method": split at the last dot.
                    var className = test.FullyQualifiedName.Substring(0, test.FullyQualifiedName.LastIndexOf("."));
                    var methodName = test.FullyQualifiedName.Substring(test.FullyQualifiedName.LastIndexOf(".") + 1);
                    sw.WriteLine("var ____TSTestExecutor____ = new " + className + "();____TSTestExecutor____." + methodName + "();");
                    sw.WriteLine("phantom.exit(0)}catch(ex){console.log(ex);phantom.exit(-1)}");
                    sw.Flush();
                }
            }
            testResult = JSRunner.Run(scriptFilePath);
            result.Outcome = testResult.Outcome;
            if (result.Outcome != TestOutcome.Passed)
            {
                result.ErrorMessage = testResult.Output;
            }
            // Best-effort cleanup of the per-test driver script.
            try { File.Delete(scriptFilePath); } catch { }
        }
        catch (InvalidTypeScriptFileException ex)
        {
            result.Outcome = TestOutcome.Failed;
            result.ErrorMessage = ex.Message;
        }
        catch (Exception ex)
        {
            result.Outcome = TestOutcome.Failed;
            result.ErrorMessage = ex.Message + ex.StackTrace;
        }
        frameworkHandle.RecordResult(result);
    });
    // Best-effort cleanup of the cached compiled outputs after all tests ran.
    foreach (KeyValuePair<string, string> item in cache)
    {
        try { File.Delete(item.Value); } catch { }
    }
}
/// <summary>
/// Records start, per-assertion results, and the aggregate outcome for a single
/// spec. Aggregation rules (order matters): a failure always wins; a skip only
/// downgrades a run that has not already failed; a success only upgrades an
/// undecided run.
/// </summary>
private void RunTest(TestSourceSettings settings, ITestLogger logger, IRunContext runContext, IFrameworkHandle frameworkHandle, Spec spec)
{
    var testCase = CreateTestCase(settings, spec);
    var aggregate = TestOutcome.None;

    frameworkHandle.RecordStart(testCase);
    foreach (var assertion in spec.Results)
    {
        if (assertion.Skipped && aggregate != TestOutcome.Failed)
        {
            aggregate = TestOutcome.Skipped;
        }
        if (assertion.Success && aggregate == TestOutcome.None)
        {
            aggregate = TestOutcome.Passed;
        }
        if (!assertion.Success && !assertion.Skipped)
        {
            aggregate = TestOutcome.Failed;
        }

        frameworkHandle.RecordResult(GetResult(testCase, assertion, frameworkHandle));
    }
    frameworkHandle.RecordEnd(testCase, aggregate);
}
/// <summary>
/// Launches node, feeds it the nodeunit driver script over stdin, and asks it to
/// run one file (optionally one named test). Per-test results are parsed from the
/// process's JSON stdout lines and recorded as they arrive; a missing 'nodeunit'
/// module on either stream produces a synthetic failure via GenericFailTest.
/// Blocks until the node process exits.
/// </summary>
/// <param name="frameworkHandle">Handle used for logging and recording results.</param>
/// <param name="runContext">Run context, used only for diagnostic logging here.</param>
/// <param name="fileName">Test file to run; its directory becomes the working directory.</param>
/// <param name="testName">Optional single test to run within the file.</param>
private static void RunFileOrTest(IFrameworkHandle frameworkHandle, IRunContext runContext, string fileName, string testName = null)
{
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "runContext.SolutionDirectory: " + runContext.SolutionDirectory);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "runContext.TestRunDirectory: " + runContext.TestRunDirectory);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "source: " + fileName);
    string nodeFullPath = NodeJsHelper.LocateNodeJs();
    // All three standard streams are redirected: stdin carries the driver script
    // and command, stdout carries JSON results, stderr carries diagnostics.
    Process proc = new Process();
    proc.StartInfo.FileName = nodeFullPath;
    proc.StartInfo.WorkingDirectory = Path.GetDirectoryName(fileName);
    proc.StartInfo.UseShellExecute = false;
    proc.StartInfo.RedirectStandardInput = true;
    proc.StartInfo.RedirectStandardOutput = true;
    proc.StartInfo.RedirectStandardError = true;
    proc.StartInfo.CreateNoWindow = true;
    proc.OutputDataReceived += (sender, args) =>
    {
        var data = args.Data;
        if (!string.IsNullOrEmpty(data))
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, "> " + data);
            if (data.Contains("Error: Cannot find module 'nodeunit'"))
            {
                // nodeunit isn't installed: fail the requested test explicitly.
                if (!string.IsNullOrEmpty(testName))
                {
                    GenericFailTest(frameworkHandle, fileName, testName, data);
                }
            }
            else
            {
                try
                {
                    // Each result line is a JSON-serialized NodeUnitTestResult.
                    var result = JsonConvert.DeserializeObject<NodeUnitTestResult>(data);
                    if (result != null && !string.IsNullOrEmpty(result.TestName))
                    {
                        var testCase = new TestCase(result.TestName, NodeUnitTestExecutor.ExecutorUri, fileName) { DisplayName = result.TestName };
                        var testResult = new TestResult(testCase) { DisplayName = result.TestName };
                        // Clamp to a minimum duration so the framework never sees zero.
                        testResult.Duration = TimeSpan.FromSeconds(Math.Max(.001, result.Duration));
                        testResult.Outcome = result.Passed ? TestOutcome.Passed : TestOutcome.Failed;
                        if (result.Assertions.Length > 0)
                        {
                            // Only the first failed assertion is surfaced on the result.
                            var first = result.Assertions.First();
                            testResult.ErrorStackTrace = FormatStackTrace(first.Stack);
                            testResult.ErrorMessage = first.Message;
                        }
                        frameworkHandle.SendMessage(TestMessageLevel.Informational, "Recording Result for " + testCase.DisplayName + " (" + testResult.Outcome.ToString() + ")");
                        frameworkHandle.RecordResult(testResult);
                    }
                }
                catch (Newtonsoft.Json.JsonException)
                {
                    // Non-JSON output lines (console noise) are intentionally ignored.
                    //frameworkHandle.SendMessage(TestMessageLevel.Informational, data);
                }
            }
        }
    };
    proc.ErrorDataReceived += (sender, args) =>
    {
        if (!string.IsNullOrEmpty(args.Data))
        {
            frameworkHandle.SendMessage(TestMessageLevel.Warning, "^ " + args.Data);
            if (args.Data.Contains("Error: Cannot find module 'nodeunit'"))
            {
                if (!string.IsNullOrEmpty(testName))
                {
                    GenericFailTest(frameworkHandle, fileName, testName, args.Data);
                }
            }
        }
    };
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process FileName: " + proc.StartInfo.FileName);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process Arguments: " + proc.StartInfo.Arguments);
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process WorkingDirectory: " + proc.StartInfo.WorkingDirectory);
    proc.Start();
    proc.BeginOutputReadLine();
    proc.BeginErrorReadLine();
    // First write the driver script, then the command that invokes it.
    proc.StandardInput.Write(Resources.RunTests);
    string testFile = Path.GetFileName(fileName).Replace("\\", "\\\\");
    // Build: runTests("file"[, "testName"]);
    string jsCommand = "runTests(\"" + testFile + "\"";
    if (!string.IsNullOrEmpty(testName))
        jsCommand += ", \"" + testName + "\"";
    jsCommand += ");";
    frameworkHandle.SendMessage(TestMessageLevel.Informational, "Process Emitting Command: " + jsCommand);
    proc.StandardInput.Write(jsCommand);
    // Closing stdin signals node that no more input is coming.
    proc.StandardInput.Close();
    proc.WaitForExit();
}