/// <summary>
/// Explores the assembly at <paramref name="location"/> and returns the Gallio
/// test elements discovered inside it (test cases only).
/// </summary>
/// <param name="location">Path of the test assembly to explore.</param>
/// <param name="projectData">Project data passed through to each created test element.</param>
/// <param name="warningHandler">Receives warnings and annotations raised during exploration.</param>
/// <returns>The discovered test elements, or an empty collection when loading is skipped.</returns>
public override ICollection Load(string location, ProjectData projectData, IWarningHandler warningHandler)
{
    // Skip loading if the extension is not fully initialized unless we are not
    // running in Visual Studio (because we are running in MSTest instead).
    if (!TipShellExtension.IsInitialized && ShellEnvironment.IsRunningInVisualStudio)
    {
        return (EmptyArray<TestElement>.Instance);
    }

    // Explore the tests.
    ITestFrameworkManager testFrameworkManager = RuntimeAccessor.ServiceLocator.Resolve<ITestFrameworkManager>();
    WarningLogger logger = new WarningLogger(warningHandler);

    // Reflection-only load; the assembly's own directory is added as a hint so
    // sibling references can be resolved.
    ReflectionOnlyAssemblyLoader loader = new ReflectionOnlyAssemblyLoader();
    loader.AddHintDirectory(Path.GetDirectoryName(location));
    IAssemblyInfo assembly = loader.ReflectionPolicy.LoadAssemblyFrom(location);

    // Exclude the MSTest adapter's own framework and allow approximate matching
    // for frameworks that cannot be positively identified.
    var testFrameworkSelector = new TestFrameworkSelector()
    {
        Filter = testFrameworkHandle => testFrameworkHandle.Id != "MSTestAdapter.TestFramework",
        FallbackMode = TestFrameworkFallbackMode.Approximate
    };

    ITestDriver driver = testFrameworkManager.GetTestDriver(testFrameworkSelector, logger);
    TestExplorationOptions testExplorationOptions = new TestExplorationOptions();

    // Collect an element per discovered test case; forward discovered
    // annotations to the warning logger.
    ArrayList tests = new ArrayList();
    MessageConsumer messageConsumer = new MessageConsumer()
        .Handle<TestDiscoveredMessage>(message =>
        {
            if (message.Test.IsTestCase)
            {
                tests.Add(GallioTestElementFactory.CreateTestElement(message.Test, location, projectData));
            }
        })
        .Handle<AnnotationDiscoveredMessage>(message =>
        {
            message.Annotation.Log(logger, true);
        });

    driver.Describe(loader.ReflectionPolicy, new ICodeElementInfo[] { assembly }, testExplorationOptions, messageConsumer, NullProgressMonitor.CreateInstance());

    return (tests);
}
/// <summary>
/// Runs the currently selected tests on the given runner, optionally under the debugger.
/// </summary>
/// <param name="debug">True to attach a debugger setup to the run.</param>
/// <param name="testRunner">The runner that executes the tests.</param>
/// <param name="progressMonitor">Parent monitor; the run consumes 85 work units of it.</param>
private void RunTests(bool debug, ITestRunner testRunner, IProgressMonitor progressMonitor)
{
    // Run against a copy so the cached package is never mutated.
    var package = testPackage.Copy();
    package.DebuggerSetup = debug ? new DebuggerSetup() : null;

    var explorationOptions = new TestExplorationOptions();
    var executionOptions = new TestExecutionOptions
    {
        FilterSet = filterService.GenerateFilterSetFromSelectedTests()
    };

    testRunner.Run(package, explorationOptions, executionOptions, progressMonitor.CreateSubProgressMonitor(85));
}
/// <summary>
/// Explores the tests in the current test package, refreshes the test tree,
/// and raises the explore started/finished events around the operation.
/// </summary>
/// <param name="progressMonitor">Monitor for the whole 100-unit task.</param>
/// <param name="testRunnerExtensions">Extensions to install on the test runner.</param>
public void Explore(IProgressMonitor progressMonitor, IEnumerable<string> testRunnerExtensions)
{
    using (progressMonitor.BeginTask("Exploring the tests", 100))
    {
        eventAggregator.Send(this, new ExploreStarted());

        DoWithTestRunner(runner =>
        {
            var options = new TestExplorationOptions();
            runner.Explore(testPackage, options, progressMonitor.CreateSubProgressMonitor(80));
            RefreshTestTree(progressMonitor.CreateSubProgressMonitor(10));
        }, progressMonitor, 10, testRunnerExtensions);

        eventAggregator.Send(this, new ExploreFinished());
    }
}
/// <summary>
/// Hosts a script runtime, redirects its console/IO through the logger, and
/// invokes the test driver script with either the "Explore" or "Run" verb.
/// </summary>
/// <param name="testExecutionOptions">Non-null for a run; null means explore only.</param>
private static void ExploreOrRun(TestPackage testPackage, ScriptRuntimeSetup scriptRuntimeSetup, string testDriverScriptPath, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor, ILogger logger)
{
    // Stacked usings preserve the original disposal order:
    // redirection first, then the sink, then the log writers.
    using (BufferedLogWriter outputWriter = new BufferedLogWriter(logger, LogSeverity.Info, Encoding.Default),
                             errorWriter = new BufferedLogWriter(logger, LogSeverity.Error, Encoding.Default))
    using (var queuedMessageSink = new QueuedMessageSink(messageSink))
    using (new ConsoleRedirection(outputWriter, errorWriter))
    {
        var scriptRuntime = new ScriptRuntime(scriptRuntimeSetup);

        // Detach script input; route script output and error through the log writers.
        scriptRuntime.IO.SetInput(Stream.Null, TextReader.Null, Encoding.Default);
        scriptRuntime.IO.SetOutput(new TextWriterStream(outputWriter), outputWriter);
        scriptRuntime.IO.SetErrorOutput(new TextWriterStream(errorWriter), errorWriter);

        try
        {
            // The presence of execution options selects the "Run" verb.
            var scriptParameters = new Dictionary<string, object>
            {
                { "Verb", testExecutionOptions != null ? "Run" : "Explore" },
                { "TestPackage", testPackage },
                { "TestExplorationOptions", testExplorationOptions },
                { "TestExecutionOptions", testExecutionOptions },
                { "MessageSink", queuedMessageSink },
                { "ProgressMonitor", progressMonitor },
                { "Logger", logger }
            };

            scriptRuntime.Globals.SetVariable(ScriptParametersVariableName, scriptParameters);
            RunScript(scriptRuntime, testDriverScriptPath);
        }
        finally
        {
            scriptRuntime.Shutdown();
        }
    }
}
/// <summary>
/// Initializes the event arguments.
/// </summary>
/// <param name="testPackage">The test package.</param>
/// <param name="testExplorationOptions">The test exploration options.</param>
/// <param name="testExecutionOptions">The test execution options.</param>
/// <param name="reportLockBox">The report lock-box which may be used to access the report asynchronously during execution.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="testPackage"/>,
/// <paramref name="testExplorationOptions"/> or <paramref name="testExecutionOptions"/> is null.</exception>
public RunStartedEventArgs(TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, LockBox<Report> reportLockBox)
{
    // Guard clauses: the first three arguments are mandatory.
    if (testPackage == null)
        throw new ArgumentNullException("testPackage");
    if (testExplorationOptions == null)
        throw new ArgumentNullException("testExplorationOptions");
    if (testExecutionOptions == null)
        throw new ArgumentNullException("testExecutionOptions");

    this.testPackage = testPackage;
    this.testExplorationOptions = testExplorationOptions;
    this.testExecutionOptions = testExecutionOptions;
    this.reportLockBox = reportLockBox;
}
/// <summary>
/// Explores the given assembly with the local test driver and returns every
/// discovered test that is an actual test case.
/// </summary>
/// <param name="assembly">Path of the assembly to explore.</param>
/// <returns>The test cases found in the assembly.</returns>
private List<Test> getTests(string assembly)
{
    // Use the local isolation provider so exploration stays in this AppDomain.
    var isolationProvider = (ITestIsolationProvider)RuntimeAccessor.ServiceLocator.ResolveByComponentId("Gallio.LocalTestIsolationProvider");
    ITestIsolationContext isolationContext = isolationProvider.CreateContext(new TestIsolationOptions(), _logger);

    // A package containing just this one assembly; strict framework matching only.
    var package = new TestPackage();
    package.AddFile(new FileInfo(assembly));
    package.TestFrameworkFallbackMode = TestFrameworkFallbackMode.Strict;

    // Explore (not run) the package. The sink rebuilds a TestModel tree from the
    // exploration messages on the fly; the model is then queried for metadata.
    var model = new TestModel();
    IMessageSink sink = TestModelSerializer.CreateMessageSinkToPopulateTestModel(model);

    var monitorProvider = new LogProgressMonitorProvider(_logger);
    monitorProvider.Run((progressMonitor) =>
    {
        _testDriver.Explore(isolationContext, package, new TestExplorationOptions(), sink, progressMonitor);
    });

    return model.AllTests.Where(x => x.IsTestCase).ToList();
}
/// <inheritdoc />
protected sealed override void DescribeImpl(IReflectionPolicy reflectionPolicy, IList<ICodeElementInfo> codeElements, TestExplorationOptions testExplorationOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Describing only builds the test model; nothing is executed here.
    string taskName = string.Format("Describing {0} tests.", FrameworkName);
    using (progressMonitor.BeginTask(taskName, 100))
    {
        progressMonitor.SetStatus("Building the test model.");
        GenerateTestModel(reflectionPolicy, codeElements, messageSink);
    }
}
/// <inheritdoc />
protected override sealed void RunAssembly(Assembly assembly, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // 100 work units total: 1 setup + 3 model + 2 commands + 93 run + 1 teardown.
    using (progressMonitor.BeginTask(string.Format("Running {0} tests.", FrameworkName), 100))
    {
        using (TestHarness testHarness = CreateTestHarness())
        {
            IDisposable appDomainState = null;
            try
            {
                progressMonitor.SetStatus("Setting up the test harness.");
                appDomainState = testHarness.SetUpAppDomain();
                progressMonitor.Worked(1);

                progressMonitor.SetStatus("Building the test model.");
                TestModel testModel = GenerateTestModel(assembly, messageSink);
                progressMonitor.Worked(3);

                progressMonitor.SetStatus("Building the test commands.");
                ITestContextManager testContextManager = CreateTestContextManager(messageSink);
                ITestCommand rootTestCommand = GenerateCommandTree(testModel, testExecutionOptions, testContextManager);
                progressMonitor.Worked(2);

                progressMonitor.SetStatus("Running the tests.");
                if (rootTestCommand != null)
                {
                    RunTestCommandsAction action = new RunTestCommandsAction(this, rootTestCommand, testExecutionOptions, testHarness, testContextManager, progressMonitor.CreateSubProgressMonitor(93));

                    if (testExecutionOptions.SingleThreaded)
                    {
                        // The execution options require the use of a single thread.
                        action.Run();
                    }
                    else
                    {
                        // Create a new thread so that we can consistently set the default apartment
                        // state to STA and so as to reduce the effective stack depth during the
                        // test run. We use Thread instead of ThreadTask because we do not
                        // require the ability to abort the Thread so we do not need to take the
                        // extra overhead.
                        Thread thread = new Thread(action.Run);
                        thread.Name = "Simple Test Driver";
                        thread.SetApartmentState(ApartmentState.STA);
                        thread.Start();
                        thread.Join();
                    }

                    // Surface any failure captured on the worker thread.
                    if (action.Exception != null)
                    {
                        throw new ModelException("A fatal exception occurred while running test commands.", action.Exception);
                    }
                }
                else
                {
                    // Nothing to run; account for the work units the run would have used.
                    progressMonitor.Worked(93);
                }
            }
            finally
            {
                progressMonitor.SetStatus("Tearing down the test harness.");
                if (appDomainState != null)
                {
                    appDomainState.Dispose();
                }
                progressMonitor.Worked(1);
            }
        }
    }
}
/// <inheritdoc />
protected override sealed void ExploreAssembly(Assembly assembly, TestExplorationOptions testExplorationOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // 5 work units total: 1 setup + 3 model + 1 teardown.
    using (progressMonitor.BeginTask(string.Format("Exploring {0} tests.", FrameworkName), 5))
    using (TestHarness testHarness = CreateTestHarness())
    {
        IDisposable appDomainCookie = null;
        try
        {
            progressMonitor.SetStatus("Setting up the test harness.");
            appDomainCookie = testHarness.SetUpAppDomain();
            progressMonitor.Worked(1);

            progressMonitor.SetStatus("Building the test model.");
            GenerateTestModel(assembly, messageSink);
            progressMonitor.Worked(3);
        }
        finally
        {
            // Tear down even if model generation throws.
            progressMonitor.SetStatus("Tearing down the test harness.");
            if (appDomainCookie != null)
            {
                appDomainCookie.Dispose();
            }
            progressMonitor.Worked(1);
        }
    }
}
/// <inheritdoc />
protected override sealed void DescribeImpl(IReflectionPolicy reflectionPolicy, IList<ICodeElementInfo> codeElements, TestExplorationOptions testExplorationOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Builds the test model from the given code elements without executing anything.
    var taskDescription = string.Format("Describing {0} tests.", FrameworkName);
    using (progressMonitor.BeginTask(taskDescription, 100))
    {
        progressMonitor.SetStatus("Building the test model.");
        GenerateTestModel(reflectionPolicy, codeElements, messageSink);
    }
}
/// <summary>
/// Shared implementation behind explore and run: locates the test driver script
/// for the package and executes it as an isolated task.
/// </summary>
private void ExploreOrRun(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor, string taskName)
{
    using (progressMonitor.BeginTask(taskName, 1))
    {
        // Bail out early on cancellation or when the package has no driver script.
        if (progressMonitor.IsCanceled)
            return;

        FileInfo scriptFile = GetTestDriverScriptFile(testPackage);
        if (scriptFile == null)
            return;

        HostSetup hostSetup = CreateHostSetup(testPackage);
        ScriptRuntimeSetup runtimeSetup = CreateScriptRuntimeSetup(testPackage);
        var remoteMessageSink = new RemoteMessageSink(messageSink);
        var remoteLogger = new RemoteLogger(logger);

        using (var remoteProgressMonitor = new RemoteProgressMonitor(progressMonitor.CreateSubProgressMonitor(1)))
        {
            // Marshal everything the task needs across the isolation boundary.
            testIsolationContext.RunIsolatedTask<ExploreOrRunTask>(hostSetup,
                (statusMessage) => progressMonitor.SetStatus(statusMessage),
                new object[] { testPackage, runtimeSetup, scriptFile.FullName, testExplorationOptions, testExecutionOptions, remoteMessageSink, remoteProgressMonitor, remoteLogger });
        }
    }
}
/// <inheritdoc />
protected sealed override void RunImpl(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Delegate to the shared explore/run implementation with execution options supplied.
    ExploreOrRun(testIsolationContext, testPackage, testExplorationOptions, testExecutionOptions,
        messageSink, progressMonitor, "Running tests.");
}
/// <inheritdoc />
public Report Run(TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IProgressMonitor progressMonitor)
{
    if (testPackage == null)
    {
        // Fixed: previously reported the stale parameter name "testPackageConfig".
        throw new ArgumentNullException("testPackage");
    }
    if (testExplorationOptions == null)
    {
        throw new ArgumentNullException("testExplorationOptions");
    }
    if (testExecutionOptions == null)
    {
        throw new ArgumentNullException("testExecutionOptions");
    }
    if (progressMonitor == null)
    {
        throw new ArgumentNullException("progressMonitor");
    }
    ThrowIfDisposed();
    if (state != State.Initialized)
    {
        throw new InvalidOperationException("The test runner must be initialized before this operation is performed.");
    }

    // Work on copies so the caller's objects are not mutated by the run.
    testPackage = testPackage.Copy();
    testExplorationOptions = testExplorationOptions.Copy();
    testExecutionOptions = testExecutionOptions.Copy();

    // Fold runner-level properties into the package.
    GenericCollectionUtils.ForEach(testRunnerOptions.Properties, x => testPackage.AddProperty(x.Key, x.Value));

    using (progressMonitor.BeginTask("Running the tests.", 10))
    {
        Stopwatch stopwatch = Stopwatch.StartNew();

        // The report is shared with listeners through a lock-box so it can be
        // read asynchronously while the run is in progress.
        Report report = new Report()
        {
            TestPackage = new TestPackageData(testPackage),
            TestModel = new TestModelData(),
            TestPackageRun = new TestPackageRun()
            {
                StartTime = DateTime.Now
            }
        };
        var reportLockBox = new LockBox<Report>(report);

        eventDispatcher.NotifyRunStarted(new RunStartedEventArgs(testPackage, testExplorationOptions, testExecutionOptions, reportLockBox));

        bool success;
        using (Listener listener = new Listener(eventDispatcher, tappedLogger, reportLockBox))
        {
            try
            {
                ITestDriver testDriver = testFrameworkManager.GetTestDriver(
                    testPackage.CreateTestFrameworkSelector(), tappedLogger);

                using (testIsolationContext.BeginBatch(progressMonitor.SetStatus))
                {
                    testDriver.Run(testIsolationContext, testPackage, testExplorationOptions,
                        testExecutionOptions, listener, progressMonitor.CreateSubProgressMonitor(10));
                }

                success = true;
            }
            catch (Exception ex)
            {
                // A fatal failure is logged and recorded as a report annotation
                // rather than propagated, so a report is always produced.
                success = false;
                tappedLogger.Log(LogSeverity.Error, "A fatal exception occurred while running tests. Possible causes include invalid test runner parameters and stack overflows.", ex);
                report.TestModel.Annotations.Add(new AnnotationData(AnnotationType.Error,
                    CodeLocation.Unknown, CodeReference.Unknown,
                    "A fatal exception occurred while running tests. See log for details.", null));
            }
            finally
            {
                // Timing is recorded whether the run succeeded or not.
                report.TestPackageRun.EndTime = DateTime.Now;
                report.TestPackageRun.Statistics.Duration = stopwatch.Elapsed.TotalSeconds;
            }
        }

        eventDispatcher.NotifyRunFinished(new RunFinishedEventArgs(success, report));
        return (report);
    }
}
/// <summary>
/// Shared explore/run implementation: each file in the test package is processed
/// by a <typeparamref name="TTask"/> executed inside the isolation context.
/// </summary>
private void ExploreOrRun <TTask>(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor, string taskName)
    where TTask : AbstractTask, new()
{
    // One work unit per file; at least one so BeginTask gets a positive total.
    double totalWorkUnits = Math.Max(testPackage.Files.Count, 1);

    using (progressMonitor.BeginTask(taskName, totalWorkUnits))
    {
        var remoteMessageSink = new RemoteMessageSink(messageSink);
        var remoteLogger = new RemoteLogger(logger);

        if (progressMonitor.IsCanceled)
        {
            return;
        }

        using (var remoteProgressMonitor = new RemoteProgressMonitor(progressMonitor))
        {
            foreach (FileInfo file in testPackage.Files)
            {
                // Each file gets its own host setup and isolated task run.
                HostSetup hostSetup = CreateHostSetup(testPackage, file);

                testIsolationContext.RunIsolatedTask <TTask>(hostSetup, progressMonitor.SetStatus, new object[]
                {
                    testPackage,
                    testExplorationOptions,
                    testExecutionOptions,
                    remoteMessageSink,
                    remoteProgressMonitor,
                    remoteLogger,
                    file,
                });
            }

            // NOTE(review): only one work unit is reported here even though the
            // task total equals the number of files; presumably the isolated
            // tasks advance the remote monitor themselves — confirm, otherwise
            // progress reporting is off for multi-file packages.
            remoteProgressMonitor.Worked(1);
        }
    }
}
/// <inheritdoc />
protected sealed override void ExploreImpl(ITestIsolationContext testIsolationContext, TestPackage testPackage, TestExplorationOptions testExplorationOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // Delegate to the shared explore/run implementation; no execution options for exploration.
    ExploreOrRun<ExploreTask>(testIsolationContext, testPackage, testExplorationOptions, null,
        messageSink, progressMonitor, "Exploring MbUnitCpp tests.");
}
/// <inheritdoc />
sealed protected override void RunAssembly(Assembly assembly, TestExplorationOptions testExplorationOptions, TestExecutionOptions testExecutionOptions, IMessageSink messageSink, IProgressMonitor progressMonitor)
{
    // 100 work units total: 1 setup + 3 model + 2 commands + 93 run + 1 teardown.
    using (progressMonitor.BeginTask(string.Format("Running {0} tests.", FrameworkName), 100))
    {
        using (TestHarness testHarness = CreateTestHarness())
        {
            IDisposable appDomainState = null;
            try
            {
                progressMonitor.SetStatus("Setting up the test harness.");
                appDomainState = testHarness.SetUpAppDomain();
                progressMonitor.Worked(1);

                progressMonitor.SetStatus("Building the test model.");
                TestModel testModel = GenerateTestModel(assembly, messageSink);
                progressMonitor.Worked(3);

                progressMonitor.SetStatus("Building the test commands.");
                ITestContextManager testContextManager = CreateTestContextManager(messageSink);
                ITestCommand rootTestCommand = GenerateCommandTree(testModel, testExecutionOptions, testContextManager);
                progressMonitor.Worked(2);

                progressMonitor.SetStatus("Running the tests.");
                if (rootTestCommand != null)
                {
                    RunTestCommandsAction action = new RunTestCommandsAction(this, rootTestCommand, testExecutionOptions, testHarness, testContextManager, progressMonitor.CreateSubProgressMonitor(93));
                    if (testExecutionOptions.SingleThreaded)
                    {
                        // The execution options require the use of a single thread.
                        action.Run();
                    }
                    else
                    {
                        // Create a new thread so that we can consistently set the default apartment
                        // state to STA and so as to reduce the effective stack depth during the
                        // test run. We use Thread instead of ThreadTask because we do not
                        // require the ability to abort the Thread so we do not need to take the
                        // extra overhead.
                        Thread thread = new Thread(action.Run);
                        thread.Name = "Simple Test Driver";
                        thread.SetApartmentState(ApartmentState.STA);
                        thread.Start();
                        thread.Join();
                    }

                    // Surface any failure captured on the worker thread.
                    if (action.Exception != null)
                    {
                        throw new ModelException("A fatal exception occurred while running test commands.", action.Exception);
                    }
                }
                else
                {
                    // Nothing to run; account for the work units the run would have used.
                    progressMonitor.Worked(93);
                }
            }
            finally
            {
                // Tear down even if any earlier phase throws.
                progressMonitor.SetStatus("Tearing down the test harness.");
                if (appDomainState != null)
                {
                    appDomainState.Dispose();
                }
                progressMonitor.Worked(1);
            }
        }
    }
}
/// <summary>
/// Builds a test package from the configured assembly locations, runs the
/// explicitly selected tests in an isolated AppDomain runner, and reports the
/// outcome as a facade task result.
/// </summary>
private FacadeTaskResult RunTests()
{
    var logger = new FacadeLoggerWrapper(facadeLogger);
    var runner = TestRunnerUtils.CreateTestRunnerByName(StandardTestRunnerFactoryNames.IsolatedAppDomain);

    // Set parameters.
    var testPackage = new TestPackage();
    foreach (var assemblyLocation in assemblyLocations)
    {
        testPackage.AddFile(new FileInfo(assemblyLocation));
    }

    testPackage.ShadowCopy = facadeTaskExecutorConfiguration.ShadowCopy;

    if (facadeTaskExecutorConfiguration.AssemblyFolder != null)
    {
        testPackage.ApplicationBaseDirectory = new DirectoryInfo(facadeTaskExecutorConfiguration.AssemblyFolder);
        testPackage.WorkingDirectory = new DirectoryInfo(facadeTaskExecutorConfiguration.AssemblyFolder);
    }

    var testRunnerOptions = new TestRunnerOptions();
    var testExplorationOptions = new TestExplorationOptions();

    // Run only the explicitly selected tests: one equality filter per test id,
    // OR-ed together and matched against test ids.
    var filters = GenericCollectionUtils.ConvertAllToArray<string, Filter<string>>(explicitTestIds, testId => new EqualityFilter<string>(testId));
    var filterSet = new FilterSet<ITestDescriptor>(new IdFilter<ITestDescriptor>(new OrFilter<string>(filters)));
    var testExecutionOptions = new TestExecutionOptions { FilterSet = filterSet };

    // Install the listeners.
    runner.Events.TestStepStarted += TestStepStarted;
    runner.Events.TestStepFinished += TestStepFinished;
    runner.Events.TestStepLifecyclePhaseChanged += TestStepLifecyclePhaseChanged;

    // Run the tests.
    try
    {
        try
        {
            runner.Initialize(testRunnerOptions, logger, CreateProgressMonitor());
            Report report = runner.Run(testPackage, testExplorationOptions, testExecutionOptions, CreateProgressMonitor());

            // Cache the report under the session id, if any, for later retrieval.
            if (sessionId != null)
            {
                SessionCache.SaveSerializedReport(sessionId, report);
            }

            return (FacadeTaskResult.Success);
        }
        catch (Exception ex)
        {
            // Drop any stale cached report; it would be misleading after a failed run.
            if (sessionId != null)
            {
                SessionCache.ClearSerializedReport(sessionId);
            }

            logger.Log(LogSeverity.Error, "A fatal exception occurred during test execution.", ex);
            return (FacadeTaskResult.Exception);
        }
        finally
        {
            SubmitFailureForRemainingPendingTasks();
        }
    }
    finally
    {
        runner.Dispose(CreateProgressMonitor());
    }
}
/// <summary>
/// Runs the tests selected in <paramref name="settings"/> through the Gallio
/// test driver and converts the finished test-case steps to AutoTest results.
/// </summary>
/// <param name="settings">Assembly path plus test/member/namespace selections.</param>
/// <returns>One result per finished test-case step.</returns>
public IEnumerable<AutoTest.TestRunners.Shared.Results.TestResult> Run(RunSettings settings)
{
    // Fixed: the guard's return statement was missing its terminating semicolon
    // (a stray empty statement followed the block instead).
    if (!_isInitialized)
    {
        return new AutoTest.TestRunners.Shared.Results.TestResult[] { getNotInitializedResult(settings) };
    }

    var tests = settings.Assembly.Tests.ToList();
    var members = settings.Assembly.Members.ToList();
    var namespaces = settings.Assembly.Namespaces.ToList();
    // NOTE(review): runAll is computed but not used in this method — presumably
    // intended for the filter construction below; confirm before removing.
    var runAll = namespaces.Count == 0 && members.Count == 0 && tests.Count == 0;

    var steps = new List<TestStepData>();
    var testResults = new List<AutoTest.TestRunners.Shared.Results.TestResult>();

    // Get a test isolation context. Here we want to run tests in the same AppDomain.
    var testIsolationProvider = (ITestIsolationProvider)RuntimeAccessor.ServiceLocator.ResolveByComponentId("Gallio.LocalTestIsolationProvider");
    var testIsolationOptions = new TestIsolationOptions();
    ITestIsolationContext testIsolationContext = testIsolationProvider.CreateContext(testIsolationOptions, _logger);

    // A package containing just this one assembly; strict framework matching only.
    var testPackage = new TestPackage();
    testPackage.AddFile(new FileInfo(settings.Assembly.Assembly));
    testPackage.TestFrameworkFallbackMode = TestFrameworkFallbackMode.Strict;

    var testExplorationOptions = new TestExplorationOptions();

    // Record every started step; translate each finished test-case step into an
    // AutoTest result. The parent step's name supplies the fixture name.
    var messageSink = new MessageConsumer()
        .Handle<TestStepStartedMessage>((message) =>
        {
            steps.Add(message.Step);
        })
        .Handle<TestStepFinishedMessage>(message =>
        {
            var test = steps.FirstOrDefault(x => x.Id.Equals(message.StepId) && x.IsTestCase);
            if (test == null)
            {
                return;
            }
            var fixture = string.Format("{0}.{1}", test.CodeReference.NamespaceName, steps.First(x => x.Id.Equals(test.ParentId)).Name);
            testResults.Add(new AutoTest.TestRunners.Shared.Results.TestResult(
                "MbUnit",
                settings.Assembly.Assembly,
                fixture,
                message.Result.Duration.TotalMilliseconds,
                string.Format("{0}.{1}", fixture, test.Name),
                convertState(message.Result.Outcome.Status),
                message.Result.Outcome.DisplayName));
        });

    // Provide a progress monitor.
    var logProgressMonitorProvider = new LogProgressMonitorProvider(_logger);

    var options = new TestExecutionOptions();
    options.FilterSet = new Gallio.Model.Filters.FilterSet<ITestDescriptor>(new OrFilter<ITestDescriptor>(getTestFilter(namespaces, members, tests)));

    // Run the tests.
    logProgressMonitorProvider.Run((progressMonitor) =>
    {
        _testDriver.Run(testIsolationContext, testPackage, testExplorationOptions, options, messageSink, progressMonitor);
    });

    return testResults;
}