public void RunSettingsWithDisabledCollectionSettingsAndInlineTestSettingsXml()
{
    // Arrange: runsettings that embed a legacy testsettings file (MSTest/SettingsFile)
    // together with two data collectors, both explicitly disabled (enabled="false").
    var runSettingsXml = @" <RunSettings> <RunConfiguration> <TargetFrameworkVersion>Framework45</TargetFrameworkVersion> <ResultsDirectory>C:\TestProject1\TestResults</ResultsDirectory> <SolutionDirectory>C:\TestProject1\</SolutionDirectory> <TargetPlatform>X86</TargetPlatform> </RunConfiguration> <MSTest> <SettingsFile>C:\TestProject1\TestSettings1.testsettings</SettingsFile> <ForcedLegacyMode>true</ForcedLegacyMode> <IgnoreTestImpact>true</IgnoreTestImpact> </MSTest> <DataCollectionRunSettings> <DataCollectors> <DataCollector friendlyName=""Video"" uri=""datacollector://Microsoft/Video/2.0"" enabled=""false"" assemblyQualifiedName=""VideoCollector""> </DataCollector> </DataCollectors> <DataCollectors> <DataCollector friendlyName=""EventLog"" uri=""datacollector://Microsoft/Log/2.0"" enabled=""false"" assemblyQualifiedName=""LogCollector""> </DataCollector> </DataCollectors> </DataCollectionRunSettings> </RunSettings>";

    // Act and validate: disabled collectors are compatible with inline testsettings,
    // both as out-of-proc collectors and after conversion to in-proc collectors.
    var outOfProcResult = InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(runSettingsXml);
    Assert.IsFalse(outOfProcResult, "Invalid response");

    var inProcRunSettingsXml = ConvertOutOfProcToInProcDataCollectionSettings(runSettingsXml);
    var inProcResult = InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(inProcRunSettingsXml);
    Assert.IsFalse(inProcResult, "Invalid response");
}
public void RunSettingsWithFakesAndCodeCoverageAndInlineTestSettingsXml()
{
    // Arrange: runsettings with an inline testsettings file plus the Code Coverage
    // and Fakes (UnitTestIsolation) collectors, neither of which is disabled.
    var runSettingsXml = @" <RunSettings> <RunConfiguration> <TargetFrameworkVersion>Framework45</TargetFrameworkVersion> <ResultsDirectory>C:\TestProject1\TestResults</ResultsDirectory> <SolutionDirectory>C:\TestProject1\</SolutionDirectory> <TargetPlatform>X86</TargetPlatform> </RunConfiguration> <MSTest> <SettingsFile>C:\TestProject1\TestSettings1.testsettings</SettingsFile> <ForcedLegacyMode>true</ForcedLegacyMode> <IgnoreTestImpact>true</IgnoreTestImpact> </MSTest> <DataCollectionRunSettings> <DataCollectors> <DataCollector friendlyName=""Code Coverage"" uri=""datacollector://Microsoft/CodeCoverage/2.0"" assemblyQualifiedName=""DynamicCoverageDataCollector""> </DataCollector> <DataCollector friendlyName=""UnitTestIsolation"" uri=""datacollector://Microsoft/unittestisolation/1.0"" assemblyQualifiedName=""DynamicCoverageDataCollector""> </DataCollector> </DataCollectors> </DataCollectionRunSettings> </RunSettings>";

    // Act and validate: as out-of-proc collectors these are exempt from the
    // testsettings incompatibility check, but once converted to in-proc
    // collectors they must be reported as incompatible.
    var outOfProcResult = InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(runSettingsXml);
    Assert.IsFalse(outOfProcResult, "Invalid response");

    var inProcRunSettingsXml = ConvertOutOfProcToInProcDataCollectionSettings(runSettingsXml);
    var inProcResult = InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(inProcRunSettingsXml);
    Assert.IsTrue(inProcResult, "Invalid response");
}
/// <inheritdoc/>
public void StartTestSession(
    StartTestSessionPayload payload,
    ITestHostLauncher testHostLauncher,
    ITestSessionEventsHandler eventsHandler,
    ProtocolConfig protocolConfig)
{
    EqtTrace.Info("TestRequestManager.StartTestSession: Starting test session.");

    if (payload.TestPlatformOptions != null)
    {
        this.telemetryOptedIn = payload.TestPlatformOptions.CollectMetrics;
    }

    var requestData = this.GetRequestData(protocolConfig);

    // Infer/patch the runsettings (framework, platform, etc.) before using them.
    if (this.UpdateRunSettingsIfRequired(
            payload.RunSettings,
            payload.Sources,
            null,
            out string updatedRunsettings))
    {
        payload.RunSettings = updatedRunsettings;
    }

    // Fail fast when the runsettings combine data collectors with an embedded
    // legacy testsettings file, which is an unsupported combination.
    if (InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(payload.RunSettings))
    {
        throw new SettingsException(
            string.Format(
                Resources.RunsettingsWithDCErrorMessage,
                payload.RunSettings));
    }

    // TODO (copoiena): Collect metrics ?

    lock (this.syncObject)
    {
        try
        {
            // Fixed: this trace previously said "StartTestRunner", which did not
            // match the method name used by every other trace line in this method.
            EqtTrace.Info("TestRequestManager.StartTestSession: Synchronization context taken.");
            this.TestPlatformEventSourceInstance.StartTestSessionStart();

            var criteria = new StartTestSessionCriteria()
            {
                Sources = payload.Sources,
                RunSettings = payload.RunSettings,
                TestHostLauncher = testHostLauncher
            };

            this.testPlatform.StartTestSession(requestData, criteria, eventsHandler);
        }
        finally
        {
            EqtTrace.Info("TestRequestManager.StartTestSession: Starting test session completed.");
            this.TestPlatformEventSourceInstance.StartTestSessionStop();

            // Publish the start-test-session complete telemetry. (The previous
            // comment here incorrectly referred to attachments processing.)
            this.metricsPublisher.Result.PublishMetrics(
                TelemetryDataConstants.StartTestSessionCompleteEvent,
                requestData.MetricsCollection.Metrics);
        }
    }
}
/// <inheritdoc />
/// <remarks>
/// Orchestrates a test run: patches the runsettings, validates collector
/// compatibility, collects telemetry, generates Fakes settings, builds the
/// run criteria (source-based or test-case-based), and executes the run.
/// </remarks>
public void RunTests(
    TestRunRequestPayload testRunRequestPayload,
    ITestHostLauncher testHostLauncher,
    ITestRunEventsRegistrar testRunEventsRegistrar,
    ProtocolConfig protocolConfig)
{
    EqtTrace.Info("TestRequestManager.RunTests: run tests started.");

    TestRunCriteria runCriteria = null;
    var runsettings = testRunRequestPayload.RunSettings;

    if (testRunRequestPayload.TestPlatformOptions != null)
    {
        this.telemetryOptedIn = testRunRequestPayload.TestPlatformOptions.CollectMetrics;
    }

    var requestData = this.GetRequestData(protocolConfig);

    // Get sources to auto detect fx and arch for both run selected or run all scenario.
    var sources = GetSources(testRunRequestPayload);

    // Infer/patch the runsettings (framework, platform, etc.) before using them.
    if (this.UpdateRunSettingsIfRequired(
        runsettings,
        sources,
        testRunEventsRegistrar,
        out string updatedRunsettings))
    {
        runsettings = updatedRunsettings;
    }

    // Data collectors combined with an embedded legacy testsettings file are
    // an unsupported combination; reject the run before doing any work.
    if (InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(runsettings))
    {
        throw new SettingsException(
            string.Format(
                Resources.RunsettingsWithDCErrorMessage,
                runsettings));
    }

    var runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(runsettings);
    var batchSize = runConfiguration.BatchSize;

    if (requestData.IsTelemetryOptedIn)
    {
        // Collect metrics.
        this.CollectMetrics(requestData, runConfiguration);

        // Collect commands.
        this.LogCommandsTelemetryPoints(requestData);

        // Collect data for legacy settings.
        this.LogTelemetryForLegacySettings(requestData, runsettings);
    }

    // Get Fakes data collector settings.
    // NOTE(review): ordinal comparison is implied by string.Equals here, but no
    // StringComparison is passed explicitly — confirm intent.
    if (!string.Equals(Environment.GetEnvironmentVariable("VSTEST_SKIP_FAKES_CONFIGURATION"), "1"))
    {
        // The commandline options do not have sources in design time mode,
        // and so we fall back to using sources instead.
        if (this.commandLineOptions.Sources.Any())
        {
            GenerateFakesUtilities.GenerateFakesSettings(
                this.commandLineOptions,
                this.commandLineOptions.Sources.ToList(),
                ref runsettings);
        }
        else if (sources.Any())
        {
            GenerateFakesUtilities.GenerateFakesSettings(
                this.commandLineOptions,
                sources,
                ref runsettings);
        }
    }

    if (testRunRequestPayload.Sources != null && testRunRequestPayload.Sources.Any())
    {
        // Run-all scenario: criteria built from container sources.
        runCriteria = new TestRunCriteria(
            testRunRequestPayload.Sources,
            batchSize,
            testRunRequestPayload.KeepAlive,
            runsettings,
            this.commandLineOptions.TestStatsEventTimeout,
            testHostLauncher,
            testRunRequestPayload.TestPlatformOptions?.TestCaseFilter,
            testRunRequestPayload.TestPlatformOptions?.FilterOptions,
            testRunRequestPayload.TestSessionInfo,
            debugEnabledForTestSession: testRunRequestPayload.TestSessionInfo != null
                && testRunRequestPayload.DebuggingEnabled);
    }
    else
    {
        // Run-selected scenario: criteria built from individual test cases.
        runCriteria = new TestRunCriteria(
            testRunRequestPayload.TestCases,
            batchSize,
            testRunRequestPayload.KeepAlive,
            runsettings,
            this.commandLineOptions.TestStatsEventTimeout,
            testHostLauncher,
            testRunRequestPayload.TestSessionInfo,
            debugEnabledForTestSession: testRunRequestPayload.TestSessionInfo != null
                && testRunRequestPayload.DebuggingEnabled);
    }

    // Run tests.
    try
    {
        this.RunTests(
            requestData,
            runCriteria,
            testRunEventsRegistrar,
            testRunRequestPayload.TestPlatformOptions);
        EqtTrace.Info("TestRequestManager.RunTests: run tests completed.");
    }
    finally
    {
        this.TestPlatformEventSourceInstance.ExecutionRequestStop();

        // Post the run complete event
        this.metricsPublisher.Result.PublishMetrics(
            TelemetryDataConstants.TestExecutionCompleteEvent,
            requestData.MetricsCollection.Metrics);
    }
}