public TestRunManagerTests()
{
    // A fresh set of mocks is created before every test (xUnit-style constructor setup).
    this.publisher = new Mock<ITestRunPublisher>();
    this.diagnosticDataCollector = new Mock<IDiagnosticDataCollector>();
    this.telemetryDataCollector = new Mock<ITelemetryDataCollector>();

    // System under test, wired with the mocked collaborators.
    this.testRunManager = new TestRunManager(
        this.publisher.Object,
        this.diagnosticDataCollector.Object,
        this.telemetryDataCollector.Object);
}
 /// <summary>
 /// Prompts the user for a test run name and, if a non-blank name was entered,
 /// creates the run and adds its proxy to the bound UI list.
 /// </summary>
 private void AddTestRun(object sender, RoutedEventArgs e)
 {
     string runName = PromptDialog.Prompt("Test Run name", "Create new test run");

     // Cancelled or blank prompt: nothing to create.
     if (string.IsNullOrWhiteSpace(runName))
     {
         return;
     }

     var createdRun = new TestRunManager().Create(runName);
     this.UITestRunList.Add(ProxyConverter.TestRunModelToProxy(createdRun));
 }
Exemplo n.º 3
0
        public async Task TestRunManager_PublishTestRun_TestSummaryWithoutTests()
        {
            // Arrange: a manager whose publisher would succeed if it were ever invoked.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>())).Returns(Task.CompletedTask);
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object);

            // Act: publish a run that carries no summary and no test results.
            await manager.PublishAsync(new TestRun("fake/1", 1));

            // Assert: nothing is published and the problem is logged as an error.
            runPublisher.Verify(p => p.PublishAsync(It.IsAny<TestRun>()), Times.Never());
            traceLogger.Verify(l => l.Error(It.IsAny<string>()));
        }
Exemplo n.º 4
0
        public async Task TestRunManager_PublishTestRun_TestSummaryWithoutTests()
        {
            // Arrange.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            var telemetryCollector = new Mock<ITelemetryDataCollector>();
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object, telemetryCollector.Object);

            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>()));

            // Act: publish a run that carries no summary and no test results.
            await manager.PublishAsync(new TestRun("fake/1", "somename", 1));

            // Assert: nothing is published and the problem is logged as an error.
            runPublisher.Verify(p => p.PublishAsync(It.IsAny<TestRun>()), Times.Never());
            traceLogger.Verify(l => l.Error(It.IsAny<string>()));
        }
Exemplo n.º 5
0
        public async Task TestRunManager_PublishTestRun_TestRunIsNotValid()
        {
            // Arrange.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            var telemetryCollector = new Mock<ITelemetryDataCollector>();
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object, telemetryCollector.Object);

            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>()));

            // Act: a null run is invalid input.
            await manager.PublishAsync(null);

            // Assert: invalid runs are never published, and an error is logged.
            runPublisher.Verify(p => p.PublishAsync(It.IsAny<TestRun>()), Times.Never());
            traceLogger.Verify(l => l.Error(It.IsAny<string>()));
        }
        /// <summary>
        /// Publish the current test run and reset the parser
        /// </summary>
        /// <param name="logData">The log line that triggered the publish; its line number is logged and it is forwarded to Reset.</param>
        private void PublishAndReset(LogData logData)
        {
            Logger.Info($"PythonTestResultParser : PublishAndReset : Publishing TestRun {_currentTestRunId} at line {logData.LineNumber}.");

            // Trim trailing whitespace/newlines from captured stack traces before publishing.
            foreach (var failedTest in _currentTestRun.FailedTests)
            {
                if (failedTest.StackTrace != null)
                {
                    failedTest.StackTrace = failedTest.StackTrace.TrimEnd();
                }
            }

            // NOTE(review): PublishAsync is not awaited here (fire-and-forget from a void
            // method) — confirm TestRunManager tracks pending publishes (e.g. flushed by
            // FinalizeAsync) so this task and its exceptions are not lost.
            TestRunManager.PublishAsync(_currentTestRun);
            Reset(logData);
        }
Exemplo n.º 7
0
        /// <inheritdoc />
        public async Task InitializeAsync(IClientFactory clientFactory, IPipelineConfig pipelineConfig, ITraceLogger traceLogger)
        {
            // NOTE(review): Task.Run wraps purely synchronous setup work here — presumably
            // to keep the caller responsive; confirm this offload is intentional.
            await Task.Run(() =>
            {
                _logger         = traceLogger;
                var publisher   = new PipelineTestRunPublisher(clientFactory, pipelineConfig);
                var telemetry   = new TelemetryDataCollector(clientFactory);
                // NOTE(review): telemetry is passed to the parsers below but not to the
                // TestRunManager constructor — confirm the two-argument overload is intended.
                _testRunManager = new TestRunManager(publisher, _logger);
                var parsers     = ParserFactory.GetTestResultParsers(_testRunManager, traceLogger, telemetry);

                foreach (var parser in parsers)
                {
                    //Subscribe parsers to Pub-Sub model
                    Subscribe(parser.Parse);
                }
            });
        }
Exemplo n.º 8
0
        public async Task TestRunManager_PublishMultipleRuns()
        {
            // Arrange.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object);

            var sampleRun = new TestRun("mocha/1", 1);
            sampleRun.TestRunSummary = new TestRunSummary { TotalTests = 7 };

            // Two sequential publishes are expected; both complete successfully.
            runPublisher.SetupSequence(p => p.PublishAsync(It.IsAny<TestRun>()))
                .Returns(Task.CompletedTask)
                .Returns(Task.CompletedTask);

            // Act: exercise publishing via the shared helper, then flush pending runs.
            RunTasks(manager, sampleRun);
            await manager.FinalizeAsync();

            // Assert: exactly two runs were published.
            runPublisher.Verify(p => p.PublishAsync(It.IsAny<TestRun>()), Times.Exactly(2));
        }
Exemplo n.º 9
0
        public async Task TestRunManager_PublishMultipleRuns()
        {
            // Arrange.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            var telemetryCollector = new Mock<ITelemetryDataCollector>();
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object, telemetryCollector.Object);

            var sampleRun = new TestRun("mocha/1", "somename", 1);
            sampleRun.TestRunSummary = new TestRunSummary { TotalTests = 7 };

            // Two sequential publishes, each yielding a distinct fake pipeline run.
            runPublisher.SetupSequence(p => p.PublishAsync(It.IsAny<TestRun>()))
                .Returns(Task.FromResult(GetFakePipelineTestRun(sampleRun, 1)))
                .Returns(Task.FromResult(GetFakePipelineTestRun(sampleRun, 2)));

            // Act: exercise publishing via the shared helper, then flush pending runs.
            RunTasks(manager, sampleRun);
            await manager.FinalizeAsync();

            // Assert: exactly two runs were published.
            runPublisher.Verify(p => p.PublishAsync(It.IsAny<TestRun>()), Times.Exactly(2));
        }
Exemplo n.º 10
0
        public async Task TestRunManager_PublishTestRun_SummaryDoesnotMatchTestRun()
        {
            // Arrange.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            var telemetryCollector = new Mock<ITelemetryDataCollector>();
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object, telemetryCollector.Object);

            // Summary claims 5/3/2 results, but only one result of each kind is attached.
            var sampleRun = new TestRun("fake/1", "somename", 1)
            {
                TestRunSummary = new TestRunSummary { TotalPassed = 5, TotalFailed = 3, TotalSkipped = 2 },
                PassedTests = new List<TestResult> { new TestResult() },
                FailedTests = new List<TestResult> { new TestResult() },
                SkippedTests = new List<TestResult> { new TestResult() }
            };

            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>()))
                .Returns(Task.FromResult(GetFakePipelineTestRun(sampleRun, 1)));

            // Act.
            await manager.PublishAsync(sampleRun);

            // Assert: summary counts win, mismatched individual results are dropped,
            // and TotalTests is recomputed as 5 + 3 + 2 = 10.
            runPublisher.Verify(p => p.PublishAsync(It.Is<TestRun>(run =>
                run.TestRunSummary.TotalTests == 10 &&
                run.PassedTests.Count == 0 &&
                run.FailedTests.Count == 0 &&
                run.SkippedTests.Count == 0 &&
                run.TestRunSummary.TotalPassed == 5 &&
                run.TestRunSummary.TotalFailed == 3 &&
                run.TestRunSummary.TotalSkipped == 2)));
        }
Exemplo n.º 11
0
        /// <summary>
        /// Maps a TestRun model to a TestRunProxy, including the run's related test
        /// cases (with status, metadata, and step definitions) loaded via the managers.
        /// </summary>
        /// <param name="run">The run model to convert.</param>
        /// <returns>A populated <c>TestRunProxy</c> with its <c>TestCasesList</c> filled in.</returns>
        public static TestRunProxy TestRunModelToProxy(TestRun run)
        {
            TestRunProxy runProxy = new TestRunProxy
            {
                ID = run.ID,
                Name = run.Name,
                CreatedBy = run.CreatedBy,
                CreatedOn = run.CreatedOn
            };

            // Fix: reuse single manager instances. The original allocated two new
            // TestManager instances on every loop iteration.
            TestRunManager runManager = new TestRunManager();
            TestManager testManager = new TestManager();

            IEnumerable<TestComposite> compositeModel = runManager.GetCompositeByRunId(runProxy.ID);
            foreach (TestComposite comp in compositeModel)
            {
                TestCase testCase = testManager.GetById(comp.TestCaseID);

                ExtendedTestCaseProxy extendedTestCaseProxy = new ExtendedTestCaseProxy
                {
                    Status = EnumUtil.ParseEnum<Status>(comp.TestCaseStatus),
                    Id = testCase.ID,
                    Title = testCase.Title,
                    Priority = EnumUtil.ParseEnum<Priority>(testCase.Priority),
                    Severity = EnumUtil.ParseEnum<Severity>(testCase.Severity),
                    IsAutomated = testCase.IsAutomated,
                    CreatedBy = testCase.CreatedBy,
                    UpdatedBy = testCase.UpdatedBy,
                    AreaID = testCase.AreaID
                };

                // Attach the step definitions belonging to this test case.
                foreach (var item in testManager.GetStepDefinitionsById(testCase.ID))
                {
                    extendedTestCaseProxy.StepDefinitionList.Add(new StepDefinitionProxy
                    {
                        Step = item.Step,
                        ExpectedResult = item.ExpectedResult,
                        ID = item.ID,
                        TestCaseID = item.TestCaseID
                    });
                }

                runProxy.TestCasesList.Add(extendedTestCaseProxy);
            }

            return runProxy;
        }
Exemplo n.º 12
0
        public async Task TestRunManager_PublishTestRun_TotalTestsLessThanActual()
        {
            // Arrange.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>())).Returns(Task.CompletedTask);
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object);

            // Summary claims 6 total, but passed + skipped + failed = 7.
            var sampleRun = new TestRun("fake/1", 1)
            {
                TestRunSummary = new TestRunSummary
                {
                    TotalPassed = 5,
                    TotalSkipped = 1,
                    TotalFailed = 1,
                    TotalExecutionTime = TimeSpan.FromMinutes(1),
                    TotalTests = 6
                }
            };

            // Act.
            await manager.PublishAsync(sampleRun);

            // Assert: TotalTests is corrected upward to the actual sum (7).
            runPublisher.Verify(p => p.PublishAsync(It.Is<TestRun>(run => run.TestRunSummary.TotalTests == 7)));
        }
Exemplo n.º 13
0
        public async Task TestRunManager_PublishTestRun_SummaryDoesnotMatchTestRun()
        {
            // Arrange.
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>())).Returns(Task.CompletedTask);
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object);

            // Summary claims 5/3/2 results, but only one result of each kind is attached.
            var sampleRun = new TestRun("fake/1", 1)
            {
                TestRunSummary = new TestRunSummary { TotalPassed = 5, TotalFailed = 3, TotalSkipped = 2 },
                PassedTests = new List<TestResult> { new TestResult() },
                FailedTests = new List<TestResult> { new TestResult() },
                SkippedTests = new List<TestResult> { new TestResult() }
            };

            // Act.
            await manager.PublishAsync(sampleRun);

            // Assert: summary counts win, mismatched individual results are dropped,
            // and TotalTests is recomputed as 5 + 3 + 2 = 10.
            runPublisher.Verify(p => p.PublishAsync(It.Is<TestRun>(run =>
                run.TestRunSummary.TotalTests == 10 &&
                run.PassedTests.Count == 0 &&
                run.FailedTests.Count == 0 &&
                run.SkippedTests.Count == 0 &&
                run.TestRunSummary.TotalPassed == 5 &&
                run.TestRunSummary.TotalFailed == 3 &&
                run.TestRunSummary.TotalSkipped == 2)));
        }
Exemplo n.º 14
0
        public async Task TestRunManager_PublishTestRun()
        {
            // Arrange: a consistent run (5 passed + 1 skipped + 1 failed = 7 total).
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>())).Returns(Task.CompletedTask);
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object);

            var sampleRun = new TestRun("mocha/1", 1);
            sampleRun.TestRunSummary = new TestRunSummary
            {
                TotalPassed = 5,
                TotalSkipped = 1,
                TotalFailed = 1,
                TotalExecutionTime = TimeSpan.FromMinutes(1),
                TotalTests = 7
            };

            // Act.
            await manager.PublishAsync(sampleRun);

            // Assert: a valid run is handed to the publisher.
            runPublisher.Verify(p => p.PublishAsync(It.IsAny<TestRun>()));
        }
Exemplo n.º 15
0
        public async Task TestRunManager_PublishTestRun()
        {
            // Arrange: a consistent run (5 passed + 1 skipped + 1 failed = 7 total).
            var traceLogger = new Mock<ITraceLogger>();
            var runPublisher = new Mock<ITestRunPublisher>();
            var telemetryCollector = new Mock<ITelemetryDataCollector>();
            var manager = new TestRunManager(runPublisher.Object, traceLogger.Object, telemetryCollector.Object);

            var sampleRun = new TestRun("mocha/1", "somename", 1);
            sampleRun.TestRunSummary = new TestRunSummary
            {
                TotalPassed = 5,
                TotalSkipped = 1,
                TotalFailed = 1,
                TotalExecutionTime = TimeSpan.FromMinutes(1),
                TotalTests = 7
            };

            runPublisher.Setup(p => p.PublishAsync(It.IsAny<TestRun>()))
                .Returns(Task.FromResult(GetFakePipelineTestRun(sampleRun, 1)));

            // Act.
            await manager.PublishAsync(sampleRun);

            // Assert: a valid run is handed to the publisher.
            runPublisher.Verify(p => p.PublishAsync(It.IsAny<TestRun>()));
        }
        /// <summary>
        /// Persists the edited per-test-case statuses for the current run and closes the dialog.
        /// </summary>
        private void SaveRunStatus_Click(object sender, RoutedEventArgs e)
        {
            this.SetPreviousTestCaseStatus();

            // Fix: reuse one manager for all updates instead of constructing a new
            // TestRunManager on every loop iteration.
            TestRunManager manager = new TestRunManager();
            foreach (KeyValuePair<ExtendedTestCaseProxy, Status> item in runStatus)
            {
                manager.UpdateTestCaseStatus(RunId, item.Key.Id, item.Value);
            }

            this.CancelDialog();
        }
Exemplo n.º 17
0
        /// <summary>
        /// Publishes the run and resets the parser by resetting the state context and current state
        /// </summary>
        private void AttemptPublishAndResetParser()
        {
            Logger.Info($"MochaTestResultParser : Resetting the parser and attempting to publish the test run at line {_stateContext.CurrentLineNumber}.");
            var testRunToPublish = _stateContext.TestRun;

            // We have encountered failed test cases but no failed summary was encountered
            if (testRunToPublish.FailedTests.Count != 0 && testRunToPublish.TestRunSummary.TotalFailed == 0)
            {
                Logger.Error("MochaTestResultParser : Failed tests were encountered but no failed summary was encountered.");
                Telemetry.AddAndAggregate(MochaTelemetryConstants.FailedTestCasesFoundButNoFailedSummary,
                                          new List <int> {
                    _stateContext.TestRun.TestRunId
                }, MochaTelemetryConstants.EventArea);
            }
            else if (testRunToPublish.TestRunSummary.TotalFailed != testRunToPublish.FailedTests.Count)
            {
                // If encountered failed tests does not match summary fire telemetry
                Logger.Error($"MochaTestResultParser : Failed tests count does not match failed summary" +
                             $" at line {_stateContext.CurrentLineNumber}");
                Telemetry.AddAndAggregate(MochaTelemetryConstants.FailedSummaryMismatch,
                                          new List <int> {
                    testRunToPublish.TestRunId
                }, MochaTelemetryConstants.EventArea);
            }

            // We have encountered pending test cases but no pending summary was encountered
            if (testRunToPublish.SkippedTests.Count != 0 && testRunToPublish.TestRunSummary.TotalSkipped == 0)
            {
                Logger.Error("MochaTestResultParser : Skipped tests were encountered but no skipped summary was encountered.");
                Telemetry.AddAndAggregate(MochaTelemetryConstants.PendingTestCasesFoundButNoFailedSummary,
                                          new List <int> {
                    _stateContext.TestRun.TestRunId
                }, MochaTelemetryConstants.EventArea);
            }
            else if (testRunToPublish.TestRunSummary.TotalSkipped != testRunToPublish.SkippedTests.Count)
            {
                // If encountered skipped tests does not match summary fire telemetry
                Logger.Error($"MochaTestResultParser : Pending tests count does not match pending summary" +
                             $" at line {_stateContext.CurrentLineNumber}");
                Telemetry.AddAndAggregate(MochaTelemetryConstants.PendingSummaryMismatch,
                                          new List <int> {
                    testRunToPublish.TestRunId
                }, MochaTelemetryConstants.EventArea);
            }

            // Ensure some summary data was detected before attempting a publish, ie. check if the state is not test results state
            switch (_currentState)
            {
            case MochaParserStates.ExpectingTestResults:
                // Still in the results state means no summary was ever seen:
                // skip the publish entirely (telemetry only, when results exist).
                if (testRunToPublish.PassedTests.Count != 0 ||
                    testRunToPublish.FailedTests.Count != 0 ||
                    testRunToPublish.SkippedTests.Count != 0)
                {
                    Logger.Error("MochaTestResultParser : Skipping publish as testcases were encountered but no summary was encountered.");
                    Telemetry.AddAndAggregate(MochaTelemetryConstants.PassedTestCasesFoundButNoPassedSummary,
                                              new List <int> {
                        _stateContext.TestRun.TestRunId
                    }, MochaTelemetryConstants.EventArea);
                }
                break;

            default:
                // Publish the test run if reset and publish was called from any state other than the test results state

                // Calculate total tests
                testRunToPublish.TestRunSummary.TotalTests =
                    testRunToPublish.TestRunSummary.TotalPassed +
                    testRunToPublish.TestRunSummary.TotalFailed +
                    testRunToPublish.TestRunSummary.TotalSkipped;

                // Trim the stack traces of extra newlines etc.
                foreach (var failedTest in testRunToPublish.FailedTests)
                {
                    if (failedTest.StackTrace != null)
                    {
                        failedTest.StackTrace = failedTest.StackTrace.TrimEnd();
                    }
                }

                // NOTE(review): PublishAsync is not awaited (fire-and-forget from a void
                // method) — confirm TestRunManager tracks pending publishes so the task
                // and its exceptions are not lost.
                TestRunManager.PublishAsync(testRunToPublish);
                break;
            }

            ResetParser();
        }
Exemplo n.º 18
0
        /// <summary>
        /// Publishes the run and resets the parser by resetting the state context and current state
        /// </summary>
        private void AttemptPublishAndResetParser()
        {
            Logger.Info($"JestTestResultParser : Resetting the parser and attempting to publish the test run at line {_stateContext.CurrentLineNumber}.");
            var testRunToPublish = _stateContext.TestRun;

            // We have encountered passed test cases but no passed summary was encountered
            if (testRunToPublish.PassedTests.Count != 0 && testRunToPublish.TestRunSummary.TotalPassed == 0)
            {
                Logger.Error("JestTestResultParser : Passed tests were encountered but no passed summary was encountered.");
                Telemetry.AddAndAggregate(JestTelemetryConstants.PassedTestCasesFoundButNoPassedSummary,
                                          new List <int> {
                    _stateContext.TestRun.TestRunId
                }, JestTelemetryConstants.EventArea);
            }
            else if (_stateContext.VerboseOptionEnabled && testRunToPublish.TestRunSummary.TotalPassed != testRunToPublish.PassedTests.Count)
            {
                // Passed-count mismatch is only meaningful in verbose mode, where every
                // passed test is expected to be listed individually.
                // If encountered failed tests does not match summary fire telemetry
                Logger.Error($"JestTestResultParser : Passed tests count does not match passed summary" +
                             $" at line {_stateContext.CurrentLineNumber}");
                Telemetry.AddAndAggregate(JestTelemetryConstants.PassedSummaryMismatch,
                                          new List <int> {
                    testRunToPublish.TestRunId
                }, JestTelemetryConstants.EventArea);
            }

            // We have encountered failed test cases but no failed summary was encountered
            if (testRunToPublish.FailedTests.Count != 0 && testRunToPublish.TestRunSummary.TotalFailed == 0)
            {
                Logger.Error("JestTestResultParser : Failed tests were encountered but no failed summary was encountered.");
                Telemetry.AddAndAggregate(JestTelemetryConstants.FailedTestCasesFoundButNoFailedSummary,
                                          new List <int> {
                    _stateContext.TestRun.TestRunId
                }, JestTelemetryConstants.EventArea);
            }
            else if (testRunToPublish.TestRunSummary.TotalFailed != testRunToPublish.FailedTests.Count)
            {
                // If encountered failed tests does not match summary fire telemtry
                Logger.Error($"JestTestResultParser : Failed tests count does not match failed summary" +
                             $" at line {_stateContext.CurrentLineNumber}");
                Telemetry.AddAndAggregate(JestTelemetryConstants.FailedSummaryMismatch,
                                          new List <int> {
                    testRunToPublish.TestRunId
                }, JestTelemetryConstants.EventArea);
            }

            // Ensure some summary data was detected before attempting a publish, ie. check if the state is not test results state
            switch (_currentState)
            {
            case JestParserStates.ExpectingTestRunStart:

                Logger.Error("JestTestResultParser : Skipping publish as no test cases or summary has been encountered.");

                break;

            // The next two states share the same handling (stacked case labels):
            // results/stack traces were seen but no summary, so the publish is skipped.
            case JestParserStates.ExpectingTestResults:

            case JestParserStates.ExpectingStackTraces:

                if (testRunToPublish.PassedTests.Count != 0 ||
                    testRunToPublish.FailedTests.Count != 0 ||
                    testRunToPublish.SkippedTests.Count != 0)
                {
                    Logger.Error("JestTestResultParser : Skipping publish as testcases were encountered but no summary was encountered.");
                    Telemetry.AddAndAggregate(JestTelemetryConstants.TestCasesFoundButNoSummary,
                                              new List <int> {
                        _stateContext.TestRun.TestRunId
                    }, JestTelemetryConstants.EventArea);
                }

                break;

            case JestParserStates.ExpectingTestRunSummary:

                // A zero-test summary is never published.
                if (testRunToPublish.TestRunSummary.TotalTests == 0)
                {
                    Logger.Error("JestTestResultParser : Skipping publish as total tests was 0.");
                    Telemetry.AddAndAggregate(JestTelemetryConstants.TotalTestsZero,
                                              new List <int> {
                        _stateContext.TestRun.TestRunId
                    }, JestTelemetryConstants.EventArea);
                    break;
                }

                // Zero run time is suspicious but not fatal: log + telemetry, still publish.
                if (testRunToPublish.TestRunSummary.TotalExecutionTime.TotalMilliseconds == 0)
                {
                    Logger.Error("JestTestResultParser : Total test run time was 0 or not encountered.");
                    Telemetry.AddAndAggregate(JestTelemetryConstants.TotalTestRunTimeZero,
                                              new List <int> {
                        _stateContext.TestRun.TestRunId
                    }, JestTelemetryConstants.EventArea);
                }

                // Trim the stack traces of extra newlines etc.
                foreach (var failedTest in testRunToPublish.FailedTests)
                {
                    if (failedTest.StackTrace != null)
                    {
                        failedTest.StackTrace = failedTest.StackTrace.TrimEnd();
                    }
                }

                // Only publish if total tests was not zero
                // NOTE(review): PublishAsync is not awaited (fire-and-forget from a void
                // method) — confirm TestRunManager tracks pending publishes so the task
                // and its exceptions are not lost.
                TestRunManager.PublishAsync(testRunToPublish);

                break;
            }

            ResetParser();
        }
        /// <summary>
        /// Converts the selected test-case proxies to models, relates them to the
        /// current test run, and closes the dialog.
        /// </summary>
        private void SaveRun(object sender, RoutedEventArgs e)
        {
            ICollection<TestCase> testCasesModelList = new Collection<TestCase>();
            foreach (var testCaseProxy in this.TestCasesList)
            {
                testCasesModelList.Add(ModelConverter.TestCaseProxyToModel(testCaseProxy));
            }

            new TestRunManager().RelateTestCaseToTestRun(this.TestRunProxy.ID, testCasesModelList);

            this.CancelDialog();
        }
Exemplo n.º 20
0
 /// <summary>
 /// Creates a fresh TestRunManager named after the currently executing test.
 /// </summary>
 public void TestInitialize()
 {
     TestRunManager = new TestRunManager(TestContext.TestName);
 }