public void RunSettingsWithFakesAndCodeCoverageAndInlineTestSettingsXml()
{
    // Arrange: runsettings embedding an inline testsettings file (MSTest/SettingsFile)
    // together with both a Code Coverage and a UnitTestIsolation (Fakes) collector.
    var runSettingsXml = @"
<RunSettings>
  <RunConfiguration>
    <TargetFrameworkVersion>Framework45</TargetFrameworkVersion>
    <ResultsDirectory>C:\TestProject1\TestResults</ResultsDirectory>
    <SolutionDirectory>C:\TestProject1\</SolutionDirectory>
    <TargetPlatform>X86</TargetPlatform>
  </RunConfiguration>
  <MSTest>
    <SettingsFile>C:\TestProject1\TestSettings1.testsettings</SettingsFile>
    <ForcedLegacyMode>true</ForcedLegacyMode>
    <IgnoreTestImpact>true</IgnoreTestImpact>
  </MSTest>
  <DataCollectionRunSettings>
    <DataCollectors>
      <DataCollector friendlyName=""Code Coverage"" uri=""datacollector://Microsoft/CodeCoverage/2.0"" assemblyQualifiedName=""DynamicCoverageDataCollector"">
      </DataCollector>
      <DataCollector friendlyName=""UnitTestIsolation"" uri=""datacollector://Microsoft/unittestisolation/1.0"" assemblyQualifiedName=""DynamicCoverageDataCollector"">
      </DataCollector>
    </DataCollectors>
  </DataCollectionRunSettings>
</RunSettings>";

    // Act and validate: the fakes/coverage collectors are exempt from the
    // testsettings incompatibility check while they are out-of-proc...
    Assert.IsFalse(
        InferRunSettingsHelper.AreRunSettingsCollectorsInCompatibleWithTestSettings(runSettingsXml),
        "Invalid response");

    // ...but once converted to in-proc collection settings they are flagged as incompatible.
    Assert.IsTrue(
        InferRunSettingsHelper.AreRunSettingsCollectorsInCompatibleWithTestSettings(
            ConvertOutOfProcToInProcDataCollectionSettings(runSettingsXml)),
        "Invalid response");
}
public void RunSettingsWithDisabledCollectionSettingsAndInlineTestSettingsXml()
{
    // Arrange: runsettings embedding an inline testsettings file (MSTest/SettingsFile)
    // plus two data collectors that are both explicitly disabled (enabled=""false"").
    var runSettingsXml = @"
<RunSettings>
  <RunConfiguration>
    <TargetFrameworkVersion>Framework45</TargetFrameworkVersion>
    <ResultsDirectory>C:\TestProject1\TestResults</ResultsDirectory>
    <SolutionDirectory>C:\TestProject1\</SolutionDirectory>
    <TargetPlatform>X86</TargetPlatform>
  </RunConfiguration>
  <MSTest>
    <SettingsFile>C:\TestProject1\TestSettings1.testsettings</SettingsFile>
    <ForcedLegacyMode>true</ForcedLegacyMode>
    <IgnoreTestImpact>true</IgnoreTestImpact>
  </MSTest>
  <DataCollectionRunSettings>
    <DataCollectors>
      <DataCollector friendlyName=""Video"" uri=""datacollector://Microsoft/Video/2.0"" enabled=""false"" assemblyQualifiedName=""VideoCollector"">
      </DataCollector>
    </DataCollectors>
    <DataCollectors>
      <DataCollector friendlyName=""EventLog"" uri=""datacollector://Microsoft/Log/2.0"" enabled=""false"" assemblyQualifiedName=""LogCollector"">
      </DataCollector>
    </DataCollectors>
  </DataCollectionRunSettings>
</RunSettings>";

    // Act and validate: disabled collectors never conflict with the inline
    // testsettings file — neither out-of-proc...
    Assert.IsFalse(
        InferRunSettingsHelper.AreRunSettingsCollectorsInCompatibleWithTestSettings(runSettingsXml),
        "Invalid response");

    // ...nor after conversion to in-proc data collection settings.
    Assert.IsFalse(
        InferRunSettingsHelper.AreRunSettingsCollectorsInCompatibleWithTestSettings(
            ConvertOutOfProcToInProcDataCollectionSettings(runSettingsXml)),
        "Invalid response");
}
/// <summary>
/// Runs tests for the given request payload (either a set of sources or a set of test cases).
/// </summary>
/// <param name="testRunRequestPayload">TestRun request Payload</param>
/// <param name="testHostLauncher">TestHost Launcher for the run</param>
/// <param name="testRunEventsRegistrar">event registrar for run events</param>
/// <param name="protocolConfig">Protocol related information</param>
public void RunTests(TestRunRequestPayload testRunRequestPayload, ITestHostLauncher testHostLauncher, ITestRunEventsRegistrar testRunEventsRegistrar, ProtocolConfig protocolConfig)
{
    EqtTrace.Info("TestRequestManager.RunTests: run tests started.");

    TestRunCriteria runCriteria = null;
    var runsettings = testRunRequestPayload.RunSettings;

    if (testRunRequestPayload.TestPlatformOptions != null)
    {
        this.telemetryOptedIn = testRunRequestPayload.TestPlatformOptions.CollectMetrics;
    }

    var requestData = this.GetRequestData(protocolConfig);

    // Get sources to auto detect fx and arch for both run selected or run all scenario.
    var sources = GetSources(testRunRequestPayload);
    if (this.UpdateRunSettingsIfRequired(runsettings, sources, out string updatedRunsettings))
    {
        runsettings = updatedRunsettings;
    }

    // Collectors configured in runsettings may conflict with an embedded testsettings
    // file; surface that as a settings error rather than failing later in the run.
    if (InferRunSettingsHelper.AreRunSettingsCollectorsInCompatibleWithTestSettings(runsettings))
    {
        throw new SettingsException(string.Format(Resources.RunsettingsWithDCErrorMessage, runsettings));
    }

    var runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(runsettings);
    var batchSize = runConfiguration.BatchSize;

    if (requestData.IsTelemetryOptedIn)
    {
        // Collect Metrics
        this.CollectMetrics(requestData, runConfiguration);

        // Collect Commands
        this.LogCommandsTelemetryPoints(requestData);

        // Collect data for Legacy Settings
        this.LogTelemetryForLegacySettings(requestData, runsettings);
    }

    if (!this.commandLineOptions.IsDesignMode)
    {
        // Generate fakes settings only for command line scenarios. In case of
        // Editors/IDEs, this responsibility is with the caller.
        GenerateFakesUtilities.GenerateFakesSettings(this.commandLineOptions, this.commandLineOptions.Sources.ToList(), ref runsettings);
    }

    if (testRunRequestPayload.Sources != null && testRunRequestPayload.Sources.Any())
    {
        // Run-all scenario: criteria built from the payload's source containers.
        runCriteria = new TestRunCriteria(
            testRunRequestPayload.Sources,
            batchSize,
            testRunRequestPayload.KeepAlive,
            runsettings,
            this.commandLineOptions.TestStatsEventTimeout,
            testHostLauncher,
            testRunRequestPayload.TestPlatformOptions?.TestCaseFilter,
            testRunRequestPayload.TestPlatformOptions?.FilterOptions);
    }
    else
    {
        // Run-selected scenario: criteria built from explicit test cases.
        runCriteria = new TestRunCriteria(
            testRunRequestPayload.TestCases,
            batchSize,
            testRunRequestPayload.KeepAlive,
            runsettings,
            this.commandLineOptions.TestStatsEventTimeout,
            testHostLauncher);
    }

    // Run tests
    try
    {
        this.RunTests(requestData, runCriteria, testRunEventsRegistrar, testRunRequestPayload.TestPlatformOptions);
        EqtTrace.Info("TestRequestManager.RunTests: run tests completed.");
    }
    finally
    {
        this.testPlatformEventSource.ExecutionRequestStop();

        // Post the run complete event
        this.metricsPublisher.Result.PublishMetrics(TelemetryDataConstants.TestExecutionCompleteEvent, requestData.MetricsCollection.Metrics);
    }
}
/// <summary>
/// Runs tests for the given request payload (either a set of sources or a set of test cases).
/// </summary>
/// <param name="testRunRequestPayload">TestRun request Payload</param>
/// <param name="testHostLauncher">TestHost Launcher for the run</param>
/// <param name="testRunEventsRegistrar">event registrar for run events</param>
/// <param name="protocolConfig">Protocol related information</param>
public void RunTests(TestRunRequestPayload testRunRequestPayload, ITestHostLauncher testHostLauncher, ITestRunEventsRegistrar testRunEventsRegistrar, ProtocolConfig protocolConfig)
{
    EqtTrace.Info("TestRequestManager.RunTests: run tests started.");

    TestRunCriteria runCriteria = null;
    var runsettings = testRunRequestPayload.RunSettings;

    if (testRunRequestPayload.TestPlatformOptions != null)
    {
        this.telemetryOptedIn = testRunRequestPayload.TestPlatformOptions.CollectMetrics;
    }

    var requestData = this.GetRequestData(protocolConfig);

    // Get sources to auto detect fx and arch for both run selected or run all scenario.
    var sources = GetSources(testRunRequestPayload);
    if (this.UpdateRunSettingsIfRequired(runsettings, sources, testRunEventsRegistrar, out string updatedRunsettings))
    {
        runsettings = updatedRunsettings;
    }

    // Collectors configured in runsettings may conflict with an embedded testsettings
    // file; surface that as a settings error rather than failing later in the run.
    if (InferRunSettingsHelper.AreRunSettingsCollectorsInCompatibleWithTestSettings(runsettings))
    {
        throw new SettingsException(string.Format(Resources.RunsettingsWithDCErrorMessage, runsettings));
    }

    var runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(runsettings);
    var batchSize = runConfiguration.BatchSize;

    if (requestData.IsTelemetryOptedIn)
    {
        // Collect Metrics
        this.CollectMetrics(requestData, runConfiguration);

        // Collect Commands
        this.LogCommandsTelemetryPoints(requestData);

        // Collect data for Legacy Settings
        this.LogTelemetryForLegacySettings(requestData, runsettings);
    }

    // get Fakes data collector settings (opt-out via environment variable)
    if (!string.Equals(Environment.GetEnvironmentVariable("VSTEST_SKIP_FAKES_CONFIGURATION"), "1", StringComparison.Ordinal))
    {
        // The commandline Options do not have sources in design time mode,
        // and so we fall back to using sources instead
        if (this.commandLineOptions.Sources.Any())
        {
            GenerateFakesUtilities.GenerateFakesSettings(this.commandLineOptions, this.commandLineOptions.Sources.ToList(), ref runsettings);
        }
        else if (sources.Any())
        {
            GenerateFakesUtilities.GenerateFakesSettings(this.commandLineOptions, sources, ref runsettings);
        }
    }

    if (testRunRequestPayload.Sources != null && testRunRequestPayload.Sources.Any())
    {
        // Run-all scenario: criteria built from the payload's source containers.
        runCriteria = new TestRunCriteria(
            testRunRequestPayload.Sources,
            batchSize,
            testRunRequestPayload.KeepAlive,
            runsettings,
            this.commandLineOptions.TestStatsEventTimeout,
            testHostLauncher,
            testRunRequestPayload.TestPlatformOptions?.TestCaseFilter,
            testRunRequestPayload.TestPlatformOptions?.FilterOptions);
    }
    else
    {
        // Run-selected scenario: criteria built from explicit test cases.
        runCriteria = new TestRunCriteria(
            testRunRequestPayload.TestCases,
            batchSize,
            testRunRequestPayload.KeepAlive,
            runsettings,
            this.commandLineOptions.TestStatsEventTimeout,
            testHostLauncher);
    }

    // Run tests
    try
    {
        this.RunTests(requestData, runCriteria, testRunEventsRegistrar, testRunRequestPayload.TestPlatformOptions);
        EqtTrace.Info("TestRequestManager.RunTests: run tests completed.");
    }
    finally
    {
        this.testPlatformEventSource.ExecutionRequestStop();

        // Post the run complete event
        this.metricsPublisher.Result.PublishMetrics(TelemetryDataConstants.TestExecutionCompleteEvent, requestData.MetricsCollection.Metrics);
    }
}