/// <summary>
/// Called when a test run is completed. Prints the run statistics followed by
/// the TeamCity suite-finished service message to the console.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    var stats = e.TestRunStatistics;
    Console.WriteLine("Total Executed: {0}", stats.ExecutedTests);
    Console.WriteLine("Total Passed: {0}", stats[TestOutcome.Passed]);
    Console.WriteLine("Total Failed: {0}", stats[TestOutcome.Failed]);
    Console.WriteLine("Total Skipped: {0}", stats[TestOutcome.Skipped]);
    Console.WriteLine("##teamcity[testSuiteFinished name='suite.name']");
}
/// <summary>
/// Verifies that a logger initialized against the event source handles a
/// fired TestRunComplete event without throwing.
/// </summary>
public void TestRunCompleteIsHandled()
{
    var loggerEvents = new MyTestLoggerEvents();
    var junitLogger = new JUnitTestLogger();
    junitLogger.Initialize(loggerEvents, new Dictionary<string, string>());

    // 6 executed: 1 passed, 2 failed, 3 skipped.
    var runStats = new MyTestRunStatistics(6, 1, 2, 3);
    var completeArgs = new TestRunCompleteEventArgs(runStats, false, false, null, null, TimeSpan.FromSeconds(1));
    loggerEvents.FireTestRunComplete(completeArgs);
}
/// <summary>
/// Forwards run-completion data to the test manager. Any exception is
/// reported on the console instead of propagating back to the event source.
/// </summary>
void events_TestRunComplete(object sender, TestRunCompleteEventArgs e)
{
    try
    {
        testManager.TestAreDone(
            e.TestRunStatistics,
            e.IsCanceled,
            e.IsAborted,
            e.Error,
            e.AttachmentSets,
            e.ElapsedTimeInRunningTests);
    }
    catch (Exception ex)
    {
        Console.WriteLine("ERROR: {0}", ex.ToString());
    }
}
/// <summary>
/// Called when a test run is completed. Prints run-level attachments, a
/// pass/fail summary banner, and the elapsed execution time to the output.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    Output.WriteLine(string.Empty, OutputLevel.Information);

    // Printing Run-level Attachments
    int runLevelAttachementCount = (e.AttachmentSets == null) ? 0 : e.AttachmentSets.Sum(attachmentSet => attachmentSet.Attachments.Count);
    if (runLevelAttachementCount > 0)
    {
        Output.WriteLine(CommandLineResources.AttachmentsBanner, OutputLevel.Information);
        foreach (AttachmentSet attachmentSet in e.AttachmentSets)
        {
            foreach (UriDataAttachment uriDataAttachment in attachmentSet.Attachments)
            {
                string attachmentOutput = string.Format(CultureInfo.CurrentCulture, CommandLineResources.AttachmentOutputFormat, uriDataAttachment.Uri.LocalPath);
                Output.WriteLine(attachmentOutput, OutputLevel.Information);
            }
        }
        Output.WriteLine(String.Empty, OutputLevel.Information);
    }

    // Output a summary.
    if (this.testsTotal > 0)
    {
        if (this.testOutcome == TestOutcome.Failed)
        {
            // Failed runs: summary plus a red failure banner written at Error level.
            Output.WriteLine(string.Format(CultureInfo.CurrentCulture, CommandLineResources.TestRunSummary, testsTotal, testsPassed, testsFailed, testsSkipped), OutputLevel.Information);
            using (new ConsoleColorHelper(ConsoleColor.Red))
            {
                Output.WriteLine(CommandLineResources.TestRunFailed, OutputLevel.Error);
            }
        }
        else
        {
            // Successful runs: same summary plus a green success banner.
            Output.WriteLine(string.Format(CultureInfo.CurrentCulture, CommandLineResources.TestRunSummary, testsTotal, testsPassed, testsFailed, testsSkipped), OutputLevel.Information);
            using (new ConsoleColorHelper(ConsoleColor.Green))
            {
                Output.WriteLine(CommandLineResources.TestRunSuccessful, OutputLevel.Information);
            }
        }

        // A zero elapsed time is treated as a sign the run hit errors, so skip printing it.
        if (!e.ElapsedTimeInRunningTests.Equals(TimeSpan.Zero))
        {
            PrintTimeSpan(e.ElapsedTimeInRunningTests);
        }
        else
        {
            EqtTrace.Info("Skipped printing test execution time on console because it looks like the test run had faced some errors");
        }
    }
}
/// <summary>
/// When a partial run completes with IsAborted set, the parallel manager must
/// request a replacement proxy execution manager via the factory func.
/// </summary>
public void HandlePartialRunCompleteShouldCreateNewProxyExecutionManagerIfIsAbortedIsTrue()
{
    var abortedCompleteArgs = new TestRunCompleteEventArgs(null, true, true, null, null, TimeSpan.Zero);
    this.mockTestHostManager = new Mock<ITestRuntimeProvider>();
    this.mockRequestSender = new Mock<ITestRequestSender>();
    var parallelManager = this.SetupExecutionManager(this.proxyManagerFunc, 2, setupTestCases: true);

    this.proxyManagerFuncCalled = false;
    var proxyManager = new ProxyExecutionManager(this.mockRequestData.Object, this.mockRequestSender.Object, this.mockTestHostManager.Object);
    parallelManager.HandlePartialRunComplete(proxyManager, abortedCompleteArgs, null, null, null);

    Assert.IsTrue(this.proxyManagerFuncCalled);
}
/// <summary>
/// Collects the final chunk of results, logs completion, and releases the
/// wait handle so the caller blocked on this run can proceed.
/// </summary>
public void HandleTestRunComplete(
    TestRunCompleteEventArgs testRunCompleteArgs,
    TestRunChangedEventArgs lastChunkArgs,
    ICollection<AttachmentSet> runContextAttachments,
    ICollection<string> executorUris)
{
    if (lastChunkArgs?.NewTestResults != null)
    {
        this.TestResults.AddRange(lastChunkArgs.NewTestResults);
    }

    Console.WriteLine("TestRunComplete");
    waitHandle.Set();
}
/// <summary>
/// Raises the run-complete notification to the registered events handler,
/// attaching telemetry metrics and the last chunk of cached test results.
/// If no handler is registered, only a warning is traced.
/// Note: <paramref name="adapterHintToShutdownAfterRun"/> is not used in this body.
/// </summary>
private void RaiseTestRunComplete(
    Exception exception,
    bool canceled,
    bool aborted,
    bool adapterHintToShutdownAfterRun,
    TimeSpan elapsedTime)
{
    // Fall back to empty stats/results when there is no run cache.
    var runStats = this.testRunCache?.TestRunStatistics ?? new TestRunStatistics(new Dictionary<TestOutcome, long>());
    var lastChunkTestResults = this.testRunCache?.GetLastChunk() ?? new List<TestResult>();

    if (this.testRunEventsHandler != null)
    {
        // Collecting Total Tests Run
        this.requestData.MetricsCollection.Add(TelemetryDataConstants.TotalTestsRun, runStats.ExecutedTests);

        // Collecting Test Run State
        this.requestData.MetricsCollection.Add(TelemetryDataConstants.RunState, canceled ? "Canceled" : (aborted ? "Aborted" : "Completed"));

        // Collecting Number of Adapters Used to run tests.
        this.requestData.MetricsCollection.Add(TelemetryDataConstants.NumberOfAdapterUsedToRunTests, this.ExecutorUrisThatRanTests.Count());

        // When the test source is a package, rewrite the last chunk's sources
        // to point at the package before sending.
        if (lastChunkTestResults.Any() && this.IsTestSourceIsPackage())
        {
            this.UpdateTestCaseSourceToPackage(lastChunkTestResults, null, out lastChunkTestResults, out var updatedTestCases);
        }

        var testRunChangedEventArgs = new TestRunChangedEventArgs(runStats, lastChunkTestResults, Enumerable.Empty<TestCase>());

        // Adding Metrics along with Test Run Complete Event Args
        Collection<AttachmentSet> attachments = this.frameworkHandle?.Attachments;
        var testRunCompleteEventArgs = new TestRunCompleteEventArgs(
            runStats,
            canceled,
            aborted,
            exception,
            attachments,
            elapsedTime);
        testRunCompleteEventArgs.Metrics = this.requestData.MetricsCollection.Metrics;

        this.testRunEventsHandler.HandleTestRunComplete(
            testRunCompleteEventArgs,
            testRunChangedEventArgs,
            attachments,
            this.executorUrisThatRanTests);
    }
    else
    {
        EqtTrace.Warning("Could not pass run completion as the callback is null. Aborted :{0}", aborted);
    }
}
/// <summary>
/// Called when a test run is completed. Swaps out the accumulated results
/// under the lock, builds the XML document and writes it to the output file.
/// </summary>
internal void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    List<TestResultInfo> results;
    lock (_resultsGuard)
    {
        // Take ownership of the collected results and reset the shared list
        // so any late events accumulate into a fresh collection.
        results = _results;
        _results = new List<TestResultInfo>();
    }

    var doc = new XDocument(CreateAssembliesElement(results));

    // File.Create truncates an existing file (File.OpenWrite does not, which
    // could leave stale trailing bytes from a previous, longer report), and
    // the using statement guarantees the stream is flushed and closed.
    using (var stream = File.Create(_outputFilePath))
    {
        doc.Save(stream);
    }
}
/// <summary>
/// Handles run completion: records still-active tests, captures the last
/// chunk of results, flags incomplete tests as timeouts, and reacts to run
/// errors before releasing the wait handle.
/// </summary>
public void HandleTestRunComplete(
    TestRunCompleteEventArgs testRunCompleteArgs,
    TestRunChangedEventArgs lastChunkArgs,
    ICollection<AttachmentSet> runContextAttachments,
    ICollection<string> executorUris)
{
    // Track tests that were still running when the run ended.
    if (lastChunkArgs?.ActiveTests != null)
    {
        foreach (var activeTest in lastChunkArgs.ActiveTests)
        {
            _inProgress[activeTest.Id] = activeTest;
        }
    }

    if (lastChunkArgs?.NewTestResults != null)
    {
        CaptureTestResults(lastChunkArgs.NewTestResults);
    }

    if (!testRunCompleteArgs.IsCanceled && (_inProgress.Any() || _runs.Values.Any(t => !t.IsComplete())))
    {
        // report ongoing tests and test case with missing results as timeouts.
        TestsInTimeout = _inProgress.Values.Union(_runs.Values.Where(t => !t.IsComplete()).Select(t => t.Result().TestCase)).ToList();
        if (TestsInTimeout.Count > 0)
        {
            TimeOut = true;
        }
    }

    ResultsUpdated?.Invoke(this, EventArgs.Empty);

    if (testRunCompleteArgs.Error != null)
    {
        if (testRunCompleteArgs.Error.GetType() == typeof(TransationLayerException))
        {
            // Transport-layer failure: assume the VsTest host crashed and signal a restart.
            _logger.LogDebug(testRunCompleteArgs.Error, $"{_runnerId}: VsTest may have crashed, triggering VsTest restart!");
            VsTestFailed?.Invoke(this, EventArgs.Empty);
        }
        else if (testRunCompleteArgs.Error.InnerException is IOException sock)
        {
            _logger.LogWarning(sock, $"{_runnerId}: Test session ended unexpectedly.");
        }
        else if (!CancelRequested)
        {
            _logger.LogDebug(testRunCompleteArgs.Error, $"{_runnerId}: VsTest error:");
        }
    }

    _waitHandle.Set();
}
/// <summary>
/// Called when a test run is completed. Builds a plain-text summary of the
/// run statistics, sends it to the client, persists it to disk, and writes
/// the TRX output file for the run.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    string s = "Total Executed: " + e.TestRunStatistics.ExecutedTests + "\n";
    s += "Total Passed: " + e.TestRunStatistics[TestOutcome.Passed] + "\n";
    s += "Total Failed: " + e.TestRunStatistics[TestOutcome.Failed] + "\n";
    // Fixed label typo: was "Total Skippeds".
    s += "Total Skipped: " + e.TestRunStatistics[TestOutcome.Skipped] + "\n";

    SendMessage(0, s);
    File.WriteAllText("test.test", s);
    //Shutdown();
    content += "\n" + s;

    var trxOutputWriter = new MsTestTrxXmlWriter(testResults, e, testRunStarted);
    trxOutputWriter.WriteTrxOutput(testRunDirectory, content);
}
/// <summary>
/// Handles the TestRunRequest complete event. If the run finished normally
/// but no tests executed, suggests supplying a test adapter path.
/// </summary>
/// <param name="sender"></param>
/// <param name="e">RunCompletion args</param>
private void TestRunRequest_OnRunCompletion(object sender, TestRunCompleteEventArgs e)
{
    // Aborted/cancelled runs are not diagnosed; their statistics may be incomplete.
    if (e.IsAborted || e.IsCanceled)
    {
        return;
    }

    var testsFoundInAnySource = e.TestRunStatistics != null && e.TestRunStatistics.ExecutedTests > 0;

    // Indicate the user to use testadapterpath command if there are no tests found
    if (!testsFoundInAnySource && string.IsNullOrEmpty(CommandLineOptions.Instance.TestAdapterPath))
    {
        this.output.Warning(false, CommandLineResources.SuggestTestAdapterPathIfNoTestsIsFound);
    }
}
/// <summary>
/// Called when a test run is completed. Disposes the open suite writers in
/// inner-to-outer order, then traces the final run statistics.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    _currentAssemblySuite?.Dispose();
    _vsTestSuite.Dispose();
    _teamCityWriter.Dispose();

    var stats = e.TestRunStatistics;
    Trace.WriteLine(string.Format("Total Executed: {0}", stats.ExecutedTests));
    Trace.WriteLine(string.Format("Total Passed: {0}", stats[TestOutcome.Passed]));
    Trace.WriteLine(string.Format("Total Failed: {0}", stats[TestOutcome.Failed]));
    Trace.WriteLine(string.Format("Total Skipped: {0}", stats[TestOutcome.Skipped]));
}
/// <summary>
/// Finishes all still-open Report Portal suites (deepest namespace first),
/// finishes the launch, then blocks until results are uploaded to the server.
/// </summary>
private void Events_TestRunComplete(object sender, TestRunCompleteEventArgs e)
{
    //TODO: apply smarter way to finish suites in real-time tests execution
    //finish suites
    while (_suitesflow.Count != 0)
    {
        // Deepest namespace = most dot-separated segments; close children before parents.
        var deeperKey = _suitesflow.Keys.OrderBy(s => s.Split('.').Length).Last();
        TraceLogger.Verbose($"Finishing namespace '{deeperKey}'");
        var deeperSuite = _suitesflow[deeperKey];
        var finishSuiteRequest = new FinishTestItemRequest
        {
            EndTime = DateTime.UtcNow,
            //TODO: identify correct suite status based on inner nodes
            Status = Status.Passed
        };
        deeperSuite.Finish(finishSuiteRequest);
        _suitesflow.Remove(deeperKey);
    }

    // finish launch
    var requestFinishLaunch = new FinishLaunchRequest
    {
        EndTime = DateTime.UtcNow
    };
    _launchReporter.Finish(requestFinishLaunch);

    // Synchronously wait for the reporter to flush everything, timing the wait.
    // A sync failure is logged and rethrown so the run surfaces the error.
    Stopwatch stopwatch = Stopwatch.StartNew();
    Console.Write("Finishing to send results to Report Portal...");
    try
    {
        _launchReporter.Sync();
    }
    catch (Exception exp)
    {
        Console.WriteLine(exp);
        throw;
    }
    stopwatch.Stop();
    Console.WriteLine($" Sync time: {stopwatch.Elapsed}");
}
/// <inheritdoc/>
public void SendExecutionComplete(
    TestRunCompleteEventArgs testRunCompleteArgs,
    TestRunChangedEventArgs lastChunkArgs,
    ICollection<AttachmentSet> runContextAttachments,
    ICollection<string> executorUris)
{
    // Bundle all completion data into a single ExecutionComplete payload.
    var completePayload = new TestRunCompletePayload
    {
        TestRunCompleteArgs = testRunCompleteArgs,
        LastRunTests = lastChunkArgs,
        RunAttachments = runContextAttachments,
        ExecutorUris = executorUris
    };

    this.communicationManager.SendMessage(MessageType.ExecutionComplete, completePayload, this.protocolVersion);
}
/// <summary>
/// Constructs listObj and summaryObj used in functions.js
/// </summary>
private string ConstructListAndSummaryObj(TestRunCompleteEventArgs e)
{
    var script = new StringBuilder();
    script.AppendLine();
    script.AppendLine("var listObj = " + txtToJSON.TestCasesString(txtResultFolderPath, captureFolderPath) + ";");
    script.Append("var summaryObj = " + txtToJSON.SummaryTable(
        e.TestRunStatistics.ExecutedTests,
        e.TestRunStatistics[TestOutcome.Passed],
        e.TestRunStatistics[TestOutcome.Failed],
        testRunStartTime,
        testRunEndTime) + ";");

    // Clean the temp file
    File.Delete(txtToJSON.CaseCategoryFile);

    return script.ToString();
}
/// <summary>
/// When a data-collection-enabled proxy finishes an aborted partial run, the
/// parallel manager must request a replacement via the factory func.
/// </summary>
public void HandlePartialRunCompleteShouldCreateNewProxyExecutionManagerIfDataCollectionEnabled()
{
    var abortedCompleteArgs = new TestRunCompleteEventArgs(null, true, true, null, null, TimeSpan.Zero, null);
    this.mockTestHostManager = new Mock<ITestRuntimeProvider>();
    this.mockRequestSender = new Mock<ITestRequestSender>();
    this.mockDataCollectionManager = new Mock<IProxyDataCollectionManager>();
    var dataCollectionProxy = new ProxyExecutionManagerWithDataCollection(this.mockRequestData.Object, this.mockRequestSender.Object, this.mockTestHostManager.Object, this.mockDataCollectionManager.Object);
    var parallelManager = this.SetupExecutionManager(this.proxyManagerFunc, 2, setupTestCases: true);

    parallelManager.StartTestRun(this.testRunCriteriaWithTests, this.mockHandler.Object);
    Assert.IsTrue(this.executionCompleted.Wait(taskTimeout), "Test run not completed.");

    this.proxyManagerFuncCalled = false;
    parallelManager.HandlePartialRunComplete(dataCollectionProxy, abortedCompleteArgs, null, null, null);
    Assert.IsTrue(this.proxyManagerFuncCalled);
}
/// <summary>
/// Handles the TestRunRequest complete event
/// </summary>
/// <param name="sender"></param>
/// <param name="e">RunCompletion args</param>
private void TestRunRequest_OnRunCompletion(object sender, TestRunCompleteEventArgs e)
{
    // Aborted/cancelled runs are not inspected; their statistics may be incomplete.
    if (e.IsAborted || e.IsCanceled)
    {
        return;
    }

    // Record whether any source actually executed tests.
    this.testsFoundInAnySource = e.TestRunStatistics != null && e.TestRunStatistics.ExecutedTests > 0;

    // TODO: We need to show a message to check for vsix extensions if no tests are executed
    // (via Utilities.ShouldIndicateTheUserToUseVsixExtensionsCommand and
    // CommandLineResources.SuggestUseVsixExtensionsIfNoTestsIsFound).
}
/// <summary>
/// Run completion must record the run-state metric exactly once.
/// </summary>
public void HandleRunCompleteShouldCollectMetrics()
{
    var metricsCollectorMock = new Mock<IMetricsCollection>();
    this.mockRequestData.Setup(rd => rd.MetricsCollection).Returns(metricsCollectorMock.Object);

    var runCompleteArgs = new TestRunCompleteEventArgs(null, false, false, null, null, TimeSpan.Zero, null);
    this.mockParallelProxyExecutionManager.Setup(mp => mp.HandlePartialRunComplete(
        this.mockProxyExecutionManager.Object, runCompleteArgs, null, null, null)).Returns(true);

    // Act
    this.parallelRunEventsHandler.HandleTestRunComplete(runCompleteArgs, null, null, null);

    // Verify.
    metricsCollectorMock.Verify(rd => rd.Add(TelemetryDataConstants.RunState, It.IsAny<string>()), Times.Once);
}
/// <summary>
/// Completes the pending task: faulted when the run carries an error,
/// canceled when the run was canceled or aborted, otherwise resolved with
/// the collected results.
/// </summary>
public void HandleTestRunComplete(TestRunCompleteEventArgs testRunCompleteArgs, TestRunChangedEventArgs lastChunkArgs, ICollection<AttachmentSet> runContextAttachments, ICollection<string> executorUris)
{
    HandleTestResults(lastChunkArgs?.NewTestResults);

    if (testRunCompleteArgs.Error != null)
    {
        tcs.TrySetException(testRunCompleteArgs.Error);
        return;
    }

    if (testRunCompleteArgs.IsCanceled || testRunCompleteArgs.IsAborted)
    {
        tcs.TrySetCanceled();
        return;
    }

    tcs.TrySetResult(results);
}
/// <summary>
/// When the partial-run handler reports the run as not yet complete, no raw
/// message or stats-change callback may be forwarded.
/// </summary>
public void HandleRunCompleteShouldNotCallLastChunkResultsIfNotPresent()
{
    var runCompleteArgs = new TestRunCompleteEventArgs(null, false, false, null, null, TimeSpan.Zero, null);
    this.mockParallelProxyExecutionManager.Setup(mp => mp.HandlePartialRunComplete(
        this.mockProxyExecutionManager.Object, runCompleteArgs, null, null, null)).Returns(false);

    this.parallelRunEventsHandler.HandleTestRunComplete(runCompleteArgs, null, null, null);

    // Raw message must be sent
    this.mockTestRunEventsHandler.Verify(mt => mt.HandleRawMessage(It.IsAny<string>()), Times.Never);
    this.mockTestRunEventsHandler.Verify(mt => mt.HandleTestRunStatsChange(null), Times.Never);
    this.mockParallelProxyExecutionManager.Verify(mp => mp.HandlePartialRunComplete(
        this.mockProxyExecutionManager.Object, runCompleteArgs, null, null, null), Times.Once);
}
/// <summary>
/// Called when a test run is completed. Filters the run attachments down to
/// those from the code-coverage data collector, analyzes each coverage file,
/// prints its summary, and converts the XML result to an lcov.info file.
/// </summary>
/// <param name="sender">
/// The sender.
/// </param>
/// <param name="e">
/// Test run complete events arguments.
/// </param>
internal void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    if (e.AttachmentSets == null)
    {
        return;
    }

    // Only attachments produced by the code-coverage data collector are processed.
    var coverageAttachments = e.AttachmentSets
        .Where(dataCollectionAttachment => CodeCoverageDataCollectorUri.Equals(dataCollectionAttachment.Uri)).ToArray();

    Console.WriteLine($"CoverageLogger.TestRunCompleteHandler: Found {coverageAttachments.Length} attachments.");

    if (coverageAttachments.Any())
    {
        // Each attachment set's first attachment is taken as the coverage file.
        var codeCoverageFiles = coverageAttachments.Select(coverageAttachment => coverageAttachment.Attachments[0].Uri.LocalPath).ToArray();
        foreach (var codeCoverageFile in codeCoverageFiles)
        {
            // The analysis step is expected to write an XML result next to the coverage file.
            var resultFile = Path.Combine(Path.GetDirectoryName(codeCoverageFile), Path.GetFileNameWithoutExtension(codeCoverageFile) + ".xml");
            try
            {
                this.codeCoverageUtility.AnalyzeCoverageFile(codeCoverageFile, this.GetCodeCoverageExePath());
            }
            catch (Exception ex)
            {
                // Best effort: an analysis failure is reported but processing continues.
                Console.WriteLine(ex.Message);
            }

            var summary = this.codeCoverageUtility.GetCoverageSummary(resultFile);
            Console.WriteLine(summary);

            var coverageFileContents = XDocument.Parse(File.ReadAllText(resultFile));
            try
            {
                // Convert the XML coverage report to lcov format, written one
                // directory above the coverage file as "lcov.info".
                var codeCoverageInternalRepresentation = new CodeCoverageReader().ParseCoverageFile(coverageFileContents, coverageFileContents.Root.Name.Namespace);
                var lcovFilePath = Path.Combine(new DirectoryInfo(Path.GetDirectoryName(codeCoverageFile)).Parent.FullName, "lcov.info");
                File.WriteAllText(lcovFilePath, new Generator().GenerateLcovCoverageData(codeCoverageInternalRepresentation).ToString());
            }
            catch (Exception err)
            {
                Console.WriteLine(err);
            }
        }
    }
}
/// <summary>
/// Handles the Run Complete event from a parallel proxy manager. When all
/// concurrent managers have finished, merges code-coverage attachments and
/// raises one aggregated run-complete notification.
/// </summary>
public override void HandleTestRunComplete(
    TestRunCompleteEventArgs testRunCompleteArgs,
    TestRunChangedEventArgs lastChunkArgs,
    ICollection<AttachmentSet> runContextAttachments,
    ICollection<string> executorUris)
{
    var parallelRunComplete = HandleSingleTestRunComplete(testRunCompleteArgs, lastChunkArgs, runContextAttachments, executorUris);

    if (parallelRunComplete)
    {
        // TODO : use TestPluginCache to iterate over all IDataCollectorAttachments
        {
            // Replace the per-source coverage attachments with the merged set
            // produced by the coverage attachments handler.
            var coverageHandler = new CodeCoverageDataAttachmentsHandler();
            Uri attachementUri = coverageHandler.GetExtensionUri();
            if (attachementUri != null)
            {
                var coverageAttachments = runDataAggregator.RunContextAttachments
                    .Where(dataCollectionAttachment => attachementUri.Equals(dataCollectionAttachment.Uri)).ToArray();

                foreach (var coverageAttachment in coverageAttachments)
                {
                    runDataAggregator.RunContextAttachments.Remove(coverageAttachment);
                }

                ICollection<AttachmentSet> attachments = coverageHandler.HandleDataCollectionAttachmentSets(new Collection<AttachmentSet>(coverageAttachments));
                foreach (var attachment in attachments)
                {
                    runDataAggregator.RunContextAttachments.Add(attachment);
                }
            }
        }

        // Build the aggregated completion args from the run data aggregator.
        var completedArgs = new TestRunCompleteEventArgs(this.runDataAggregator.GetAggregatedRunStats(),
            this.runDataAggregator.IsCanceled,
            this.runDataAggregator.IsAborted,
            this.runDataAggregator.GetAggregatedException(),
            this.runDataAggregator.RunContextAttachments,
            this.runDataAggregator.ElapsedTime);

        // Add Metrics from Test Host
        completedArgs.Metrics = this.runDataAggregator.GetAggregatedRunDataMetrics();

        HandleParallelTestRunComplete(completedArgs);
    }
}
/// <summary>
/// Called when a test run is completed. Swaps out the collected results
/// under the lock, writes them as a JUnit XML document to the configured
/// output path (creating the directory if needed), and logs the location.
/// Any failure is reported on the console and rethrown.
/// </summary>
internal void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    try
    {
        List<TestResultInfo> resultList;
        lock (this.resultsGuard)
        {
            // Take ownership of the accumulated results and reset the shared list.
            resultList = this.results;
            this.results = new List<TestResultInfo>();
        }

        var doc = new XDocument(this.CreateTestSuitesElement(resultList));

        // Create directory if not exist
        var loggerFileDirPath = Path.GetDirectoryName(this.outputFilePath);
        if (!Directory.Exists(loggerFileDirPath))
        {
            Directory.CreateDirectory(loggerFileDirPath);
        }

        var settings = new XmlWriterSettings()
        {
            // Emit a UTF-8 BOM only when the configured encoding option asks for one.
            Encoding = new UTF8Encoding(this.FileEncodingOption == FileEncoding.UTF8Bom),
            Indent = true,
        };

        using (var f = File.Create(this.outputFilePath))
        {
            using (var w = XmlWriter.Create(f, settings))
            {
                doc.Save(w);
            }
        }

        var resultsFileMessage = string.Format(CultureInfo.CurrentCulture, "JunitXML Logger - Results File: {0}", this.outputFilePath);
        Console.WriteLine(Environment.NewLine + resultsFileMessage);
    }
    catch (Exception ex)
    {
        // Fixed typo in the failure message: "unhandeled" -> "unhandled".
        Console.WriteLine("JunitXML Logger: Threw an unhandled exception.");
        Console.WriteLine(ex.Message);
        Console.WriteLine(ex.Source);
        throw;
    }
}
/// <summary>
/// Records the final chunk of test results, any run-level attachments, and
/// the telemetry metrics reported with the completed run.
/// </summary>
public void HandleTestRunComplete(
    TestRunCompleteEventArgs testRunCompleteArgs,
    TestRunChangedEventArgs lastChunkArgs,
    ICollection<AttachmentSet> runContextAttachments,
    ICollection<string> executorUris)
{
    if (lastChunkArgs?.NewTestResults != null)
    {
        this.TestResults.AddRange(lastChunkArgs.NewTestResults);
    }

    if (testRunCompleteArgs.AttachmentSets != null)
    {
        this.Attachments.AddRange(testRunCompleteArgs.AttachmentSets);
    }

    this.Metrics = testRunCompleteArgs.Metrics;
}
/// <summary>
/// Shared fixture setup: builds a test run wired to fake file system,
/// console output and in-memory store, plus a canonical run-complete event
/// used by the tests.
/// </summary>
public TestRunCompleteWorkflowTests()
{
    this.fileSystem = new FakeFileSystem();
    this.testRun = new TestRunBuilder()
        .WithLoggerConfiguration(new LoggerConfiguration(new ()
        {
            { LoggerConfiguration.LogFilePathKey, "/tmp/results.json" }
        }))
        .WithFileSystem(this.fileSystem)
        .WithConsoleOutput(new FakeConsoleOutput())
        .WithStore(new TestResultStore())
        .WithSerializer(new JsonTestResultSerializer())
        .Build();

    // A normal (not canceled/aborted), error-free run with empty stats and
    // no attachments, completing instantly.
    this.testRunCompleteEvent = new TestRunCompleteEventArgs(
        stats: new TestRunStatistics(),
        isCanceled: false,
        isAborted: false,
        error: null,
        attachmentSets: new Collection<AttachmentSet>(),
        elapsedTime: TimeSpan.Zero);
}
/// <summary>
/// When a raw ExecutionComplete message carries a last chunk of results, the
/// logger manager must receive both the stats-change notification (with that
/// chunk) and the run-complete notification, each exactly once.
/// </summary>
public void HandleRawMessageShouldInvokeShouldInvokeHandleTestRunStatsChangeOfLoggerManagerWhenLastChunkAvailable()
{
    var mockStats = new Mock<ITestRunStatistics>();

    // One finished result and one still-active test make up the "last chunk".
    var testResults = new List<ObjectModel.TestResult>
    {
        new ObjectModel.TestResult(
            new ObjectModel.TestCase(
                "A.C.M",
                new Uri("executor://dummy"),
                "A"))
    };
    var activeTestCases = new List<ObjectModel.TestCase>
    {
        new ObjectModel.TestCase(
            "A.C.M2",
            new Uri("executor://dummy"),
            "A")
    };
    this.loggerManager.Setup(x => x.LoggersInitialized).Returns(true);
    this.mockDataSerializer.Setup(x => x.DeserializeMessage(It.IsAny<string>()))
        .Returns(new Message() { MessageType = MessageType.ExecutionComplete });
    var testRunChangedEventArgs = new TestRunChangedEventArgs(mockStats.Object, testResults, activeTestCases);
    var testRunCompleteEvent = new TestRunCompleteEventArgs(new TestRunStatistics(1, null), false, false, null, null, TimeSpan.FromSeconds(0));

    // Any deserialized payload resolves to the complete args plus the last chunk.
    this.mockDataSerializer.Setup(x => x.DeserializePayload<TestRunCompletePayload>(It.IsAny<Message>()))
        .Returns(new TestRunCompletePayload()
        {
            TestRunCompleteArgs = testRunCompleteEvent,
            LastRunTests = testRunChangedEventArgs
        });

    this.testRunRequest.ExecuteAsync();
    this.testRunRequest.HandleRawMessage(string.Empty);

    loggerManager.Verify(lm => lm.HandleTestRunStatsChange(testRunChangedEventArgs), Times.Once);
    loggerManager.Verify(lm => lm.HandleTestRunComplete(It.IsAny<TestRunCompleteEventArgs>()), Times.Once);
}
/// <summary>
/// Raises the aggregated run-complete notification: first as a raw
/// ExecutionComplete message (raw messages were blocked during the parallel
/// run), then as the actual run-complete callback to the client handler.
/// </summary>
protected void HandleParallelTestRunComplete(TestRunCompleteEventArgs completedArgs)
{
    // Sequential runs emit a raw 'TestRunCompletePayload' message; recreate
    // the equivalent message here from the aggregated data.
    var completePayload = new TestRunCompletePayload()
    {
        ExecutorUris = this.runDataAggregator.ExecutorUris,
        LastRunTests = null,
        RunAttachments = this.runDataAggregator.RunContextAttachments,
        TestRunCompleteArgs = completedArgs
    };
    ConvertToRawMessageAndSend(MessageType.ExecutionComplete, completePayload);

    // send actual test runcomplete to clients
    this.actualRunEventsHandler.HandleTestRunComplete(
        completedArgs,
        null,
        this.runDataAggregator.RunContextAttachments,
        this.runDataAggregator.ExecutorUris);
}
/// <summary>
/// An ExecutionComplete raw message must trigger AfterTestRunEnd on the
/// proxy data collection manager exactly once.
/// </summary>
public void HandleRawMessageShouldGetDataCollectorAttachments()
{
    var runCompleteArgs = new TestRunCompleteEventArgs(null, false, false, null, new Collection<AttachmentSet>(), new TimeSpan());

    this.mockDataSerializer.Setup(x => x.DeserializeMessage(It.IsAny<string>()))
        .Returns(new Message() { MessageType = MessageType.ExecutionComplete });
    this.mockDataSerializer.Setup(x => x.DeserializePayload<TestRunCompletePayload>(It.IsAny<Message>()))
        .Returns(new TestRunCompletePayload() { TestRunCompleteArgs = runCompleteArgs });

    this.testRunEventHandler.HandleRawMessage(string.Empty);

    this.proxyDataCollectionManager.Verify(
        dcm => dcm.AfterTestRunEnd(false, It.IsAny<ITestRunEventsHandler>()), Times.Once);
}
/// <inheritdoc />
public void SendExecutionComplete(
    TestRunCompleteEventArgs testRunCompleteArgs,
    TestRunChangedEventArgs lastChunkArgs,
    ICollection<AttachmentSet> runContextAttachments,
    ICollection<string> executorUris)
{
    var completePayload = new TestRunCompletePayload
    {
        TestRunCompleteArgs = testRunCompleteArgs,
        LastRunTests = lastChunkArgs,
        RunAttachments = runContextAttachments,
        ExecutorUris = executorUris
    };

    // Serialize with the negotiated protocol version before sending.
    var serializedData = this.dataSerializer.SerializePayload(MessageType.ExecutionComplete, completePayload, this.protocolVersion);
    this.SendData(serializedData);
}
/// <summary>
/// Copies the completion data onto the run model, raises OnTestRunComplete,
/// generates and saves the report, then detaches the event handlers.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    _testRun.Finished = DateTime.UtcNow;
    _testRun.TestRunStatistics = e.TestRunStatistics;
    _testRun.IsCanceled = e.IsCanceled;
    _testRun.IsAborted = e.IsAborted;
    _testRun.Error = e.Error;
    _testRun.AttachmentSets = e.AttachmentSets;
    _testRun.ElapsedTimeInRunningTests = e.ElapsedTimeInRunningTests;

    OnTestRunComplete(_testRun);

    var reportContent = GenerateReport();
    var reportFileName = GetFileName();
    SaveReport(reportFileName, reportContent);
    ConsoleOutput.Instance.Information(false, $"Saved report to: {reportFileName}");

    RemoveEventHandlers();
}
/// <summary>
/// Handles test run complete.
/// </summary>
/// <param name="e">TestRunComplete event args.</param>
public void HandleTestRunComplete(TestRunCompleteEventArgs e)
{
    if (this.isDisposed)
    {
        // HandleRawMessage and HandleTestRunComplete can both call into here,
        // so a second call after disposal is expected — ignore it quietly.
        EqtTrace.Verbose("TestLoggerManager.HandleTestRunComplete: Ignoring as the object is disposed.");
        return;
    }

    try
    {
        this.loggerEvents.CompleteTestRun(e.TestRunStatistics, e.IsCanceled, e.IsAborted, e.Error, e.AttachmentSets, e.ElapsedTimeInRunningTests);
    }
    finally
    {
        // The manager is single-use: dispose as soon as completion is delivered.
        this.Dispose();
    }
}
/// <summary>
/// Handles the Run Complete event from a parallel proxy manager
/// </summary>
public virtual void HandleTestRunComplete(
    TestRunCompleteEventArgs testRunCompleteArgs,
    TestRunChangedEventArgs lastChunkArgs,
    ICollection<AttachmentSet> runContextAttachments,
    ICollection<string> executorUris)
{
    var allSourcesDone = HandleSingleTestRunComplete(testRunCompleteArgs, lastChunkArgs, runContextAttachments, executorUris);
    if (!allSourcesDone)
    {
        return;
    }

    // Every concurrent manager has finished: raise one aggregated completion.
    var aggregatedAttachments = new Collection<AttachmentSet>(runDataAggregator.RunCompleteArgsAttachments);
    var completedArgs = new TestRunCompleteEventArgs(
        runDataAggregator.GetAggregatedRunStats(),
        runDataAggregator.IsCanceled,
        runDataAggregator.IsAborted,
        runDataAggregator.GetAggregatedException(),
        aggregatedAttachments,
        runDataAggregator.ElapsedTime);
    HandleParallelTestRunComplete(completedArgs);
}
/// <summary>
/// Verifies that the local path of every attachment carried by the run
/// completion event is written to the console output exactly once.
/// </summary>
public void AttachmentInformationShouldBeWrittenToConsoleIfAttachmentsArePresent()
{
    // One attachment set containing two attachments.
    var attachmentSet = new AttachmentSet(new Uri("test://uri"), "myattachmentset");
    var uriDataAttachment = new UriDataAttachment(new Uri("file://server/filename.ext"), "description");
    attachmentSet.Attachments.Add(uriDataAttachment);
    var uriDataAttachment1 = new UriDataAttachment(new Uri("file://server/filename1.ext"), "description");
    attachmentSet.Attachments.Add(uriDataAttachment1);
    var attachmentSetList = new List<AttachmentSet>();
    attachmentSetList.Add(attachmentSet);
    var testRunCompleteEventArgs = new TestRunCompleteEventArgs(null, false, false, null, new Collection<AttachmentSet>(attachmentSetList), new TimeSpan(1, 0, 0, 0));

    // Raise an event on mock object raised to register test case count and mark Outcome as Outcome.Failed
    this.testRunRequest.Raise(m => m.OnRunCompletion += null, testRunCompleteEventArgs);

    // Each attachment path must have been printed exactly once.
    this.mockOutput.Verify(o => o.WriteLine(string.Format(CultureInfo.CurrentCulture, CommandLineResources.AttachmentOutputFormat, uriDataAttachment.Uri.LocalPath), OutputLevel.Information), Times.Once());
    this.mockOutput.Verify(o => o.WriteLine(string.Format(CultureInfo.CurrentCulture, CommandLineResources.AttachmentOutputFormat, uriDataAttachment1.Uri.LocalPath), OutputLevel.Information), Times.Once());
}
/// <summary>
/// Raises the TestRunComplete event with the given args. A subscriber and a
/// non-null args instance are required; both are checked only via
/// Debug.Assert, so in release builds an unsubscribed event would throw
/// NullReferenceException on invocation.
/// </summary>
public void FireTestRunComplete(TestRunCompleteEventArgs e)
{
    Debug.Assert(TestRunComplete != null, "TestRunComplete != null");
    Debug.Assert(e != null, "e != null");
    TestRunComplete(this, e);
}
/// <summary>
/// Constructs listObj and summaryObj used in functions.js
/// </summary>
private string ConstructListAndSummaryObj(TestRunCompleteEventArgs e)
{
    var builder = new StringBuilder();
    builder.AppendLine();
    builder.AppendLine("var listObj = " + txtToJSON.TestCasesString(txtResultFolderPath, captureFolderPath) + ";");

    var stats = e.TestRunStatistics;
    builder.Append("var summaryObj = " + txtToJSON.SummaryTable(
        stats.ExecutedTests,
        stats[TestOutcome.Passed],
        stats[TestOutcome.Failed],
        testRunStartTime,
        testRunEndTime) + ";");

    // Clean the temp file
    File.Delete(txtToJSON.CaseCategoryFile);

    return builder.ToString();
}
/// <summary>
/// Called when a test run is completed.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    // Insert the necessary info used in index.html and copy it to report folder.
    var indexFilePath = Path.Combine(reportFolderPath, indexHtmlName);
    File.WriteAllText(indexFilePath, ConstructIndexHtml(e));
}
/// <summary>
/// Inserts the corresponding script to the template html and generates the index.html
/// </summary>
private string ConstructIndexHtml(TestRunCompleteEventArgs e)
{
    var scriptBlock = ConstructListAndSummaryObj(e);
    return InsertScriptToTemplate(Properties.Resources.index, scriptBlock);
}
/// <summary>
/// Called when a test run is completed. Disposes the assembly suite (if one
/// is open), the top-level suite and the TeamCity writer, then traces the
/// final run statistics.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    _currentAssemblySuite?.Dispose();
    _vsTestSuite.Dispose();
    _teamCityWriter.Dispose();

    var stats = e.TestRunStatistics;
    Trace.WriteLine(string.Format("Total Executed: {0}", stats.ExecutedTests));
    Trace.WriteLine(string.Format("Total Passed: {0}", stats[TestOutcome.Passed]));
    Trace.WriteLine(string.Format("Total Failed: {0}", stats[TestOutcome.Failed]));
    Trace.WriteLine(string.Format("Total Skipped: {0}", stats[TestOutcome.Skipped]));
}
/// <summary>
/// Called when a test run is completed. Prints run statistics, builds a
/// single JUnit-style suite for the whole run, and serializes it to the file
/// named by the "TestResultsFile" startup parameter (default TestResult.xml).
/// IO and serialization failures are reported and swallowed; anything else
/// is reported and rethrown.
/// </summary>
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    try
    {
        // Echo the run statistics to the console.
        Console.WriteLine("Total Executed: {0}", e.TestRunStatistics.ExecutedTests);
        Console.WriteLine("Total Passed: {0}", e.TestRunStatistics[TestOutcome.Passed]);
        Console.WriteLine("Total Failed: {0}", e.TestRunStatistics[TestOutcome.Failed]);
        Console.WriteLine("Total Skipped: {0}", e.TestRunStatistics[TestOutcome.Skipped]);

        var root = new TestRun { TestSuites = new List<TestSuite>() };
        var result = new TestSuite
        {
            Name = "VS Test result",
            Failures = (int)e.TestRunStatistics[TestOutcome.Failed],
            Skipped = (int)e.TestRunStatistics[TestOutcome.Skipped],
            // Outcomes None and NotFound are both counted as errors.
            Errors = (int)e.TestRunStatistics[TestOutcome.None] + (int)e.TestRunStatistics[TestOutcome.NotFound],
            Tests = (int)e.TestRunStatistics.ExecutedTests,
            TestCases = testCases.ToList(),
            Timestamp = DateTime.Now,
            Time = e.ElapsedTimeInRunningTests.TotalSeconds,
            SystemOut = string.Join(Environment.NewLine, stdOut),
            Hostname = machineName,
            Properties = new List<property>
            {
                new property { Name = "IsAborted", Value = e.IsAborted.ToString() },
                new property { Name = "IsCanceled", Value = e.IsCanceled.ToString() },
            }
        };

        if (e.Error != null)
        {
            // Surface the run-level error both in system-out and as a property.
            result.SystemOut = e.Error + Environment.NewLine + result.SystemOut;
            result.Properties.Add(new property { Name = "Error", Value = e.Error.ToString() });
        }

        root.TestSuites.Add(result);

        var ser = new XmlSerializer(typeof(TestRun));
        string fileName;
        // Output path comes from the logger parameters; default to TestResult.xml.
        if (!startupParameters.TryGetValue("TestResultsFile", out fileName) || string.IsNullOrWhiteSpace(fileName))
        {
            fileName = "TestResult.xml";
        }

        Console.WriteLine("Writing the results into {0}", Path.GetFullPath(fileName));
        using (FileStream fs = File.Create(fileName))
        {
            ser.Serialize(fs, root);
        }
    }
    catch (IOException ex)
    {
        Console.WriteLine("IO exception: {0}", ex.Message);
    }
    catch (SerializationException ex)
    {
        Console.WriteLine("Serialization exception: {0}", ex.Message);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
        throw;
    }
}