        /// <summary>
        /// Publish a separate test run for each result file that has results.
        /// </summary>
        private async Task PublishToNewTestRunPerTestResultFileAsync(List <string> resultFiles,
                                                                     ITestRunPublisher publisher,
                                                                     TestRunContext runContext,
                                                                     string resultReader,
                                                                     int batchSize,
                                                                     CancellationToken cancellationToken)
        {
            try
            {
                var groupedFiles = resultFiles
                                   .Select((resultFile, index) => new { Index = index, file = resultFile })
                                   .GroupBy(pair => pair.Index / batchSize)
                                   .Select(bucket => bucket.Select(pair => pair.file).ToList())
                                   .ToList();

                bool changeTestRunTitle = resultFiles.Count > 1;

                foreach (var files in groupedFiles)
                {
                    // Publish separate test run for each result file that has results.
                    var publishTasks = files.Select(async resultFile =>
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string runName = _runTitle;
                        if (!string.IsNullOrWhiteSpace(_runTitle) && changeTestRunTitle)
                        {
                            runName = GetRunTitle();
                        }

                        _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                        TestRunData testRunData = publisher.ReadResultsFromFile(runContext, resultFile, runName);

                        if (_failTaskOnFailedTests)
                        {
                            _isTestRunOutcomeFailed = _isTestRunOutcomeFailed || GetTestRunOutcome(testRunData);
                        }

                        cancellationToken.ThrowIfCancellationRequested();

                        if (testRunData != null && testRunData.Results != null && testRunData.Results.Length > 0)
                        {
                            testRunData.AddCustomField(_testRunSystemCustomFieldName, _testRunSystem);
                            TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);
                            await publisher.AddResultsAsync(testRun, testRunData.Results, _executionContext.CancellationToken);
                            await publisher.EndTestRunAsync(testRunData, testRun.Id, cancellationToken: _executionContext.CancellationToken);
                        }
                        else
                        {
                            _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                        }
                    });
                    await Task.WhenAll(publishTasks);
                }
            }
            catch (Exception ex) when(!(ex is OperationCanceledException))
            {
                //Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
        }
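
The batching above is a plain LINQ chunking idiom: each file gets an index, integer division by batchSize assigns it to a bucket, and each bucket becomes one list whose publishes are awaited together before the next bucket starts. A self-contained sketch of just that chunking step (file names are made up for illustration):

using System;
using System.Collections.Generic;
using System.Linq;

static class BatchingSketch
{
    // Split a list into consecutive chunks of at most batchSize items,
    // mirroring the Select/GroupBy pipeline in the method above.
    public static List<List<string>> Chunk(List<string> items, int batchSize)
    {
        return items
            .Select((item, index) => new { Index = index, Item = item })
            .GroupBy(pair => pair.Index / batchSize)
            .Select(group => group.Select(pair => pair.Item).ToList())
            .ToList();
    }

    public static void Main()
    {
        var files = new List<string> { "a.trx", "b.trx", "c.trx", "d.trx", "e.trx" };
        foreach (var batch in Chunk(files, batchSize: 2))
        {
            Console.WriteLine(string.Join(", ", batch)); // "a.trx, b.trx" / "c.trx, d.trx" / "e.trx"
        }
    }
}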
Example #2
 public TestRunDataPublisherHelper(IExecutionContext executionContext, ITestRunPublisher libraryTestRunPublisher, LegacyTestRunPublisher agentTestRunPublisher, ITestResultsServer testResultServer)
 {
     _executionContext        = executionContext;
     _libraryTestRunPublisher = libraryTestRunPublisher;
     _agentTestRunPublisher   = agentTestRunPublisher;
     _testResultsServer       = testResultServer;
 }
 public void InitializePublisher(IExecutionContext context, string projectName, VssConnection connection, string testRunner, bool publishRunLevelAttachments)
 {
     Trace.Entering();
     _executionContext = context;
     _projectName      = projectName;
     _resultReader     = GetTestResultReader(testRunner, publishRunLevelAttachments);
     _testRunPublisher = HostContext.GetService <ITestRunPublisher>();
     _testRunPublisher.InitializePublisher(_executionContext, connection, projectName, _resultReader);
     Trace.Leaving();
 }
 public BuildCreator(
     IConfigurationProvider configurationProvider, ITfsManualBuildCreator manualBuildCreator, 
     ITestRunPublisher testRunPublisher, IBuildTestResultPublisher buildTestResultsPublisher, IBuildInvoker buildInvoker)
 {
     _configurationProvider = configurationProvider;
     _manualBuildCreator = manualBuildCreator;
     _testRunPublisher = testRunPublisher;
     _buildTestResultsPublisher = buildTestResultsPublisher;
     _buildInvoker = buildInvoker;
 }
Example #5
 public BuildCreator(
     IConfigurationProvider configurationProvider, ITfsManualBuildCreator manualBuildCreator,
     ITestRunPublisher testRunPublisher, IBuildTestResultPublisher buildTestResultsPublisher, IBuildInvoker buildInvoker)
 {
     _configurationProvider     = configurationProvider;
     _manualBuildCreator        = manualBuildCreator;
     _testRunPublisher          = testRunPublisher;
     _buildTestResultsPublisher = buildTestResultsPublisher;
     _buildInvoker = buildInvoker;
 }
Example #6
 public void InitializePublisher(IExecutionContext context, string projectName, VssConnection connection, string testRunner, bool publishRunLevelAttachments)
 {
     Trace.Entering();
     _executionContext   = context;
     _projectName        = projectName;
     _testRunner         = testRunner;
     _resultReader       = GetTestResultReader(_testRunner, publishRunLevelAttachments);
     _testRunPublisher   = HostContext.GetService <ITestRunPublisher>();
     _featureFlagService = HostContext.GetService <IFeatureFlagService>();
     _testRunPublisher.InitializePublisher(_executionContext, connection, projectName, _resultReader);
     _calculateTestRunSummary = _featureFlagService.GetFeatureFlagState(TestResultsConstants.CalculateTestRunSummaryFeatureFlag, TestResultsConstants.TFSServiceInstanceGuid);
     Trace.Leaving();
 }
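
This overload also snapshots the CalculateTestRunSummary feature flag once at initialization instead of querying the service on every publish. A rough sketch of that init-time gating pattern; IFeatureFlagServiceLike is a hypothetical stand-in here, not the agent's real interface:

using System;

// Hypothetical stand-in for the agent's feature flag service, used only to keep the sketch standalone.
public interface IFeatureFlagServiceLike
{
    bool GetFeatureFlagState(string flagName, Guid serviceInstance);
}

public class SummaryPublisherSketch
{
    private readonly bool _calculateTestRunSummary;

    public SummaryPublisherSketch(IFeatureFlagServiceLike featureFlags, string flagName, Guid serviceInstance)
    {
        // Evaluate the flag once at initialization, as InitializePublisher does above.
        _calculateTestRunSummary = featureFlags.GetFeatureFlagState(flagName, serviceInstance);
    }

    public void Publish()
    {
        if (_calculateTestRunSummary)
        {
            Console.WriteLine("Calculating and storing the test run summary.");
        }
        else
        {
            Console.WriteLine("Skipping test run summary calculation.");
        }
    }
}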
        public void InitializePublisher(IExecutionContext context, string projectName, VssConnection connection, string testRunner)
        {
            Trace.Entering();
            _executionContext = context;
            _projectName      = projectName;
            _connection       = connection;
            _testRunPublisher = new TestRunPublisher(connection, new CommandTraceListener(context));
            _testLogStore     = new TestLogStore(connection, new CommandTraceListener(context));

            var extensionManager = HostContext.GetService <IExtensionManager>();

            _parser = (extensionManager.GetExtensions <IParser>()).FirstOrDefault(x => testRunner.Equals(x.Name, StringComparison.OrdinalIgnoreCase));
            Trace.Leaving();
        }
        private void SetupMocks([CallerMemberName] string name = "")
        {
            TestHostContext hc = new TestHostContext(this, name);

            _ec = new Mock <IExecutionContext>();
            List <string> warnings;
            var           variables = new Variables(hc, new Dictionary <string, VariableValue>(), out warnings);

            _ec.Setup(x => x.Variables).Returns(variables);

            hc.SetSingleton <ITestResultsServer>(_testResultServer.Object);

            _publisher = new TestRunPublisher();
            _publisher.Initialize(hc);
            _publisher.InitializePublisher(_ec.Object, new VssConnection(new Uri("http://dummyurl"), new Common.VssCredentials()), "Project1", _reader.Object);
        }
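
The test wires a Moq mock of IExecutionContext into the publisher before pointing it at a throwaway VssConnection. A minimal, standalone sketch of the same Moq Setup/Returns/Verify pattern, using a hypothetical IExecutionContextLike interface so it compiles on its own:

using System;
using Moq;

// Hypothetical interface used only to keep the sketch self-contained;
// the real test mocks IExecutionContext and ITestResultsServer.
public interface IExecutionContextLike
{
    string ProjectName { get; }
    void Debug(string message);
}

static class MoqSetupSketch
{
    public static void Main()
    {
        // Same Setup/Returns stubbing pattern as _ec.Setup(x => x.Variables).Returns(variables) above.
        var ec = new Mock<IExecutionContextLike>();
        ec.Setup(x => x.ProjectName).Returns("Project1");

        Console.WriteLine(ec.Object.ProjectName); // "Project1"
        ec.Object.Debug("reading results");       // loose mock: calls without a setup are no-ops
        ec.Verify(x => x.Debug("reading results"), Times.Once());
    }
}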
        /// <summary>
        /// Publish a single test run
        /// </summary>
        private async Task PublishAllTestResultsToSingleTestRunAsync(List<string> resultFiles, ITestRunPublisher publisher, int buildId, TestRunContext runContext, string resultReader, CancellationToken cancellationToken)
        {
            try
            {
                DateTime startTime = DateTime.Now; //use local time since TestRunData defaults to local times
                TimeSpan totalTestCaseDuration = TimeSpan.Zero;
                List<string> runAttachments = new List<string>();
                List<TestCaseResultData> runResults = new List<TestCaseResultData>();

                //read results from each file
                foreach (string resultFile in resultFiles)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    //test case results
                    _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                    TestRunData resultFileRunData = publisher.ReadResultsFromFile(runContext, resultFile);

                    if (resultFileRunData != null && resultFileRunData.Results != null && resultFileRunData.Results.Length > 0)
                    {
                        foreach (TestCaseResultData tcResult in resultFileRunData.Results)
                        {
                            int durationInMs = 0;
                            int.TryParse(tcResult.DurationInMs, out durationInMs);
                            totalTestCaseDuration = totalTestCaseDuration.Add(TimeSpan.FromMilliseconds(durationInMs));
                        }
                        runResults.AddRange(resultFileRunData.Results);

                        //run attachments
                        if (resultFileRunData.Attachments != null)
                        {
                            runAttachments.AddRange(resultFileRunData.Attachments);
                        }
                    }
                    else
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                    }
                }

                string runName = string.IsNullOrWhiteSpace(_runTitle)
                    ? StringUtil.Format("{0}_TestResults_{1}", _testRunner, buildId)
                    : _runTitle;

                // create test run
                TestRunData testRunData = new TestRunData(
                    name: runName,
                    startedDate: startTime.ToString("o"),
                    completedDate: startTime.Add(totalTestCaseDuration).ToString("o"),
                    state: "InProgress",
                    isAutomated: true,
                    buildId: runContext != null ? runContext.BuildId : 0,
                    buildFlavor: runContext != null ? runContext.Configuration : string.Empty,
                    buildPlatform: runContext != null ? runContext.Platform : string.Empty
                    );

                testRunData.Attachments = runAttachments.ToArray();

                //publish run if there are results.
                if (runResults.Count > 0)
                {
                    TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);
                    await publisher.AddResultsAsync(testRun, runResults.ToArray(), _executionContext.CancellationToken);
                    await publisher.EndTestRunAsync(testRunData, testRun.Id, true, _executionContext.CancellationToken);
                }
            }
            catch (Exception ex) when (!(ex is OperationCanceledException))
            {
                //Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
        }
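
The single-run path stamps the run with a completedDate computed as the start time plus the sum of every parsed DurationInMs, treating unparsable values as zero. A sketch of just that arithmetic, with hypothetical names:

using System;
using System.Collections.Generic;

static class RunWindowSketch
{
    // Sum per-case durations (strings, like TestCaseResultData.DurationInMs) and derive
    // the run's completed date from its start date, as the method above does.
    public static (string StartedDate, string CompletedDate) ComputeRunWindow(
        DateTime startTime, IEnumerable<string> durationsInMs)
    {
        TimeSpan total = TimeSpan.Zero;
        foreach (string duration in durationsInMs)
        {
            int durationInMs;
            int.TryParse(duration, out durationInMs); // unparsable values count as 0 ms
            total = total.Add(TimeSpan.FromMilliseconds(durationInMs));
        }
        return (startTime.ToString("o"), startTime.Add(total).ToString("o"));
    }

    public static void Main()
    {
        var window = ComputeRunWindow(DateTime.Now, new[] { "1500", "250", "not-a-number" });
        Console.WriteLine(window.StartedDate + " -> " + window.CompletedDate);
    }
}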
Example #10
        /// <summary>
        /// Publish a single test run
        /// </summary>
        private async Task<bool> PublishAllTestResultsToSingleTestRunAsync(List<string> resultFiles, ITestRunPublisher publisher, TestRunContext runContext, string resultReader, string runTitle, int? buildId, CancellationToken cancellationToken)
        {
            bool isTestRunOutcomeFailed = false;

            try
            {
                //use local time since TestRunData defaults to local times
                DateTime                  minStartDate          = DateTime.MaxValue;
                DateTime                  maxCompleteDate       = DateTime.MinValue;
                DateTime                  presentTime           = DateTime.UtcNow;
                bool                      dateFormatError       = false;
                TimeSpan                  totalTestCaseDuration = TimeSpan.Zero;
                List <string>             runAttachments        = new List <string>();
                List <TestCaseResultData> runResults            = new List <TestCaseResultData>();
                TestRunSummary            testRunSummary        = new TestRunSummary();
                //read results from each file
                foreach (string resultFile in resultFiles)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    //test case results
                    _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                    TestRunData resultFileRunData = publisher.ReadResultsFromFile(runContext, resultFile);
                    isTestRunOutcomeFailed = isTestRunOutcomeFailed || GetTestRunOutcome(resultFileRunData, testRunSummary);

                    if (resultFileRunData != null)
                    {
                        if (resultFileRunData.Results != null && resultFileRunData.Results.Length > 0)
                        {
                            try
                            {
                                if (string.IsNullOrEmpty(resultFileRunData.StartDate) || string.IsNullOrEmpty(resultFileRunData.CompleteDate))
                                {
                                    dateFormatError = true;
                                }

                                // As per discussion with Manoj (refer bug 565487): when merging, the test run duration should span from the minimum start time to the maximum completed time.
                                if (!string.IsNullOrEmpty(resultFileRunData.StartDate))
                                {
                                    DateTime startDate = DateTime.Parse(resultFileRunData.StartDate, null, DateTimeStyles.RoundtripKind);
                                    minStartDate = minStartDate > startDate ? startDate : minStartDate;

                                    if (!string.IsNullOrEmpty(resultFileRunData.CompleteDate))
                                    {
                                        DateTime endDate = DateTime.Parse(resultFileRunData.CompleteDate, null, DateTimeStyles.RoundtripKind);
                                        maxCompleteDate = maxCompleteDate < endDate ? endDate : maxCompleteDate;
                                    }
                                }
                            }
                            catch (FormatException)
                            {
                                _executionContext.Warning(StringUtil.Loc("InvalidDateFormat", resultFile, resultFileRunData.StartDate, resultFileRunData.CompleteDate));
                                dateFormatError = true;
                            }

                            // Continue to calculate the duration as a fallback in case the dates are missing or badly formatted.
                            foreach (TestCaseResultData tcResult in resultFileRunData.Results)
                            {
                                int durationInMs = Convert.ToInt32(tcResult.DurationInMs);
                                totalTestCaseDuration = totalTestCaseDuration.Add(TimeSpan.FromMilliseconds(durationInMs));
                            }

                            runResults.AddRange(resultFileRunData.Results);

                            //run attachments
                            if (resultFileRunData.Attachments != null)
                            {
                                runAttachments.AddRange(resultFileRunData.Attachments);
                            }
                        }
                        else
                        {
                            _executionContext.Output(StringUtil.Loc("NoResultFound", resultFile));
                        }
                    }
                    else
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                    }
                }

                //publish run if there are results.
                if (runResults.Count > 0)
                {
                    string runName = string.IsNullOrWhiteSpace(runTitle)
                    ? StringUtil.Format("{0}_TestResults_{1}", _resultReader.Name, buildId)
                    : runTitle;

                    if (DateTime.Compare(minStartDate, maxCompleteDate) > 0)
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidCompletedDate", maxCompleteDate, minStartDate));
                        dateFormatError = true;
                    }

                    minStartDate    = DateTime.Equals(minStartDate, DateTime.MaxValue) ? presentTime : minStartDate;
                    maxCompleteDate = dateFormatError || DateTime.Equals(maxCompleteDate, DateTime.MinValue) ? minStartDate.Add(totalTestCaseDuration) : maxCompleteDate;

                    // create test run
                    TestRunData testRunData = new TestRunData(
                        name: runName,
                        startedDate: minStartDate.ToString("o"),
                        completedDate: maxCompleteDate.ToString("o"),
                        state: "InProgress",
                        isAutomated: true,
                        buildId: runContext != null ? runContext.BuildId : 0,
                        buildFlavor: runContext != null ? runContext.Configuration : string.Empty,
                        buildPlatform: runContext != null ? runContext.Platform : string.Empty,
                        releaseUri: runContext != null ? runContext.ReleaseUri : null,
                        releaseEnvironmentUri: runContext != null ? runContext.ReleaseEnvironmentUri : null
                        );
                    testRunData.PipelineReference = runContext.PipelineReference;
                    testRunData.Attachments       = runAttachments.ToArray();
                    testRunData.AddCustomField(_testRunSystemCustomFieldName, runContext.TestRunSystem);
                    AddTargetBranchInfoToRunCreateModel(testRunData, runContext.TargetBranchName);

                    TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);

                    await publisher.AddResultsAsync(testRun, runResults.ToArray(), _executionContext.CancellationToken);

                    TestRun updatedRun = await publisher.EndTestRunAsync(testRunData, testRun.Id, true, _executionContext.CancellationToken);

                    // Check failed results for flaky tests.
                    // Fall back to the flaky check if there are any failures.
                    bool isFlakyCheckEnabled = _featureFlagService.GetFeatureFlagState(TestResultsConstants.EnableFlakyCheckInAgentFeatureFlag, TestResultsConstants.TCMServiceInstanceGuid);

                    if (isTestRunOutcomeFailed && isFlakyCheckEnabled)
                    {
                        IList <TestRun> publishedRuns = new List <TestRun>();
                        publishedRuns.Add(updatedRun);
                        var runOutcome = _testRunPublisherHelper.CheckRunsForFlaky(publishedRuns, _projectName);
                        if (runOutcome != null && runOutcome.HasValue)
                        {
                            isTestRunOutcomeFailed = runOutcome.Value;
                        }
                    }

                    StoreTestRunSummaryInEnvVar(testRunSummary);
                }
            }
            catch (Exception ex) when(!(ex is OperationCanceledException && _executionContext.CancellationToken.IsCancellationRequested))
            {
                // Not catching all OperationCanceledExceptions, since pipeline cancellation should cancel the command as well.
                // Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
            return isTestRunOutcomeFailed;
        }
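
When several files merge into one run, the window is taken from the minimum StartDate to the maximum CompleteDate across files, parsed with DateTimeStyles.RoundtripKind; any missing or malformed date flips a flag so the caller falls back to start time plus summed durations. A standalone sketch of that folding step (names are illustrative):

using System;
using System.Globalization;

static class MergedWindowSketch
{
    // Fold one file's dates into the running min/max, reporting false when the caller
    // should fall back to a duration-based end time, mirroring the merge logic above.
    public static bool TryFold(string startDate, string completeDate,
                               ref DateTime minStart, ref DateTime maxComplete)
    {
        try
        {
            if (string.IsNullOrEmpty(startDate) || string.IsNullOrEmpty(completeDate))
            {
                return false; // missing dates: fall back to start + summed durations
            }
            DateTime start = DateTime.Parse(startDate, null, DateTimeStyles.RoundtripKind);
            DateTime end = DateTime.Parse(completeDate, null, DateTimeStyles.RoundtripKind);
            if (start < minStart) { minStart = start; }
            if (end > maxComplete) { maxComplete = end; }
            return true;
        }
        catch (FormatException)
        {
            return false; // malformed dates: same fallback
        }
    }

    public static void Main()
    {
        DateTime minStart = DateTime.MaxValue, maxComplete = DateTime.MinValue;
        bool ok = TryFold("2023-01-01T10:00:00Z", "2023-01-01T10:05:00Z", ref minStart, ref maxComplete);
        Console.WriteLine(ok + ": " + minStart.ToString("o") + " -> " + maxComplete.ToString("o"));
    }
}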
        private void SetupMocks([CallerMemberName] string name = "")
        {
            TestHostContext hc = new TestHostContext(this, name);
            _ec = new Mock<IExecutionContext>();
            List<string> warnings;
            var variables = new Variables(hc, new Dictionary<string, string>(), new List<MaskHint>(), out warnings);
            _ec.Setup(x => x.Variables).Returns(variables);

            hc.SetSingleton<ITestResultsServer>(_testResultServer.Object);

            _publisher = new TestRunPublisher();
            _publisher.Initialize(hc);
            _publisher.InitializePublisher(_ec.Object, new Client.VssConnection(new Uri("http://dummyurl"), new Common.VssCredentials()), "Project1", _reader.Object);
        }
Example #12
        /// <summary>
        /// Publish a single test run
        /// </summary>
        private async Task PublishAllTestResultsToSingleTestRunAsync(List <string> resultFiles, ITestRunPublisher publisher, int buildId, TestRunContext runContext, string resultReader, CancellationToken cancellationToken)
        {
            try
            {
                //use local time since TestRunData defaults to local times
                DateTime                  minStartDate          = DateTime.MaxValue;
                DateTime                  maxCompleteDate       = DateTime.MinValue;
                DateTime                  presentTime           = DateTime.UtcNow;
                bool                      dateFormatError       = false;
                TimeSpan                  totalTestCaseDuration = TimeSpan.Zero;
                List <string>             runAttachments        = new List <string>();
                List <TestCaseResultData> runResults            = new List <TestCaseResultData>();

                //read results from each file
                foreach (string resultFile in resultFiles)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    //test case results
                    _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                    TestRunData resultFileRunData = publisher.ReadResultsFromFile(runContext, resultFile);

                    if (_failTaskOnFailedTests)
                    {
                        _isTestRunOutcomeFailed = _isTestRunOutcomeFailed || GetTestRunOutcome(resultFileRunData);
                    }

                    if (resultFileRunData != null && resultFileRunData.Results != null && resultFileRunData.Results.Length > 0)
                    {
                        try
                        {
                            if (string.IsNullOrEmpty(resultFileRunData.StartDate) || string.IsNullOrEmpty(resultFileRunData.CompleteDate))
                            {
                                dateFormatError = true;
                            }

                            // As per discussion with Manoj (refer bug 565487): when merging, the test run duration should span from the minimum start time to the maximum completed time.
                            if (!string.IsNullOrEmpty(resultFileRunData.StartDate))
                            {
                                DateTime startDate = DateTime.Parse(resultFileRunData.StartDate, null, DateTimeStyles.RoundtripKind);
                                minStartDate = minStartDate > startDate ? startDate : minStartDate;

                                if (!string.IsNullOrEmpty(resultFileRunData.CompleteDate))
                                {
                                    DateTime endDate = DateTime.Parse(resultFileRunData.CompleteDate, null, DateTimeStyles.RoundtripKind);
                                    maxCompleteDate = maxCompleteDate < endDate ? endDate : maxCompleteDate;
                                }
                            }
                        }
                        catch (FormatException)
                        {
                            _executionContext.Warning(StringUtil.Loc("InvalidDateFormat", resultFile, resultFileRunData.StartDate, resultFileRunData.CompleteDate));
                            dateFormatError = true;
                        }

                        // Continue to calculate the duration as a fallback in case the dates are missing or badly formatted.
                        foreach (TestCaseResultData tcResult in resultFileRunData.Results)
                        {
                            int durationInMs = Convert.ToInt32(tcResult.DurationInMs);
                            totalTestCaseDuration = totalTestCaseDuration.Add(TimeSpan.FromMilliseconds(durationInMs));
                        }

                        runResults.AddRange(resultFileRunData.Results);

                        //run attachments
                        if (resultFileRunData.Attachments != null)
                        {
                            runAttachments.AddRange(resultFileRunData.Attachments);
                        }
                    }
                    else
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                    }
                }

                //publish run if there are results.
                if (runResults.Count > 0)
                {
                    string runName = string.IsNullOrWhiteSpace(_runTitle)
                    ? StringUtil.Format("{0}_TestResults_{1}", _testRunner, buildId)
                    : _runTitle;

                    if (DateTime.Compare(minStartDate, maxCompleteDate) > 0)
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidCompletedDate", maxCompleteDate, minStartDate));
                        dateFormatError = true;
                    }

                    minStartDate    = DateTime.Equals(minStartDate, DateTime.MaxValue) ? presentTime : minStartDate;
                    maxCompleteDate = dateFormatError || DateTime.Equals(maxCompleteDate, DateTime.MinValue) ? minStartDate.Add(totalTestCaseDuration) : maxCompleteDate;

                    // create test run
                    TestRunData testRunData = new TestRunData(
                        name: runName,
                        startedDate: minStartDate.ToString("o"),
                        completedDate: maxCompleteDate.ToString("o"),
                        state: "InProgress",
                        isAutomated: true,
                        buildId: runContext != null ? runContext.BuildId : 0,
                        buildFlavor: runContext != null ? runContext.Configuration : string.Empty,
                        buildPlatform: runContext != null ? runContext.Platform : string.Empty,
                        releaseUri: runContext != null ? runContext.ReleaseUri : null,
                        releaseEnvironmentUri: runContext != null ? runContext.ReleaseEnvironmentUri : null
                        );

                    testRunData.Attachments = runAttachments.ToArray();
                    testRunData.AddCustomField(_testRunSystemCustomFieldName, _testRunSystem);
                    AddTargetBranchInfoToRunCreateModel(testRunData, runContext.PullRequestTargetBranchName);

                    TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);

                    await publisher.AddResultsAsync(testRun, runResults.ToArray(), _executionContext.CancellationToken);

                    await publisher.EndTestRunAsync(testRunData, testRun.Id, true, _executionContext.CancellationToken);
                }
            }
            catch (Exception ex) when(!(ex is OperationCanceledException))
            {
                //Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
        }
Example #13
        /// <summary>
        /// Publish a separate test run for each result file that has results.
        /// </summary>
        private async Task <bool> PublishToNewTestRunPerTestResultFileAsync(List <string> resultFiles,
                                                                            ITestRunPublisher publisher,
                                                                            TestRunContext runContext,
                                                                            string resultReader,
                                                                            string runTitle,
                                                                            int batchSize,
                                                                            CancellationToken cancellationToken)
        {
            bool isTestRunOutcomeFailed = false;

            try
            {
                IList <TestRun> publishedRuns = new List <TestRun>();
                var             groupedFiles  = resultFiles
                                                .Select((resultFile, index) => new { Index = index, file = resultFile })
                                                .GroupBy(pair => pair.Index / batchSize)
                                                .Select(bucket => bucket.Select(pair => pair.file).ToList())
                                                .ToList();

                bool           changeTestRunTitle = resultFiles.Count > 1;
                TestRunSummary testRunSummary     = new TestRunSummary();
                foreach (var files in groupedFiles)
                {
                    // Publish separate test run for each result file that has results.
                    var publishTasks = files.Select(async resultFile =>
                    {
                        cancellationToken.ThrowIfCancellationRequested();

                        string runName = runTitle;
                        if (!string.IsNullOrWhiteSpace(runTitle) && changeTestRunTitle)
                        {
                            runName = GetRunName(runTitle);
                        }

                        _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                        TestRunData testRunData       = publisher.ReadResultsFromFile(runContext, resultFile, runName);
                        testRunData.PipelineReference = runContext.PipelineReference;

                        isTestRunOutcomeFailed = isTestRunOutcomeFailed || GetTestRunOutcome(testRunData, testRunSummary);

                        cancellationToken.ThrowIfCancellationRequested();

                        if (testRunData != null)
                        {
                            if (testRunData.Results != null && testRunData.Results.Length > 0)
                            {
                                testRunData.AddCustomField(_testRunSystemCustomFieldName, runContext.TestRunSystem);
                                AddTargetBranchInfoToRunCreateModel(testRunData, runContext.TargetBranchName);
                                TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);
                                await publisher.AddResultsAsync(testRun, testRunData.Results, _executionContext.CancellationToken);
                                TestRun updatedRun = await publisher.EndTestRunAsync(testRunData, testRun.Id, cancellationToken: _executionContext.CancellationToken);

                                publishedRuns.Add(updatedRun);
                            }
                            else
                            {
                                _executionContext.Output(StringUtil.Loc("NoResultFound", resultFile));
                            }
                        }
                        else
                        {
                            _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                        }
                    });
                    await Task.WhenAll(publishTasks);
                }

                // Check failed results for flaky tests.
                // Fall back to the flaky check if there are any failures.
                bool isFlakyCheckEnabled = _featureFlagService.GetFeatureFlagState(TestResultsConstants.EnableFlakyCheckInAgentFeatureFlag, TestResultsConstants.TCMServiceInstanceGuid);

                if (isTestRunOutcomeFailed && isFlakyCheckEnabled)
                {
                    var runOutcome = _testRunPublisherHelper.CheckRunsForFlaky(publishedRuns, _projectName);
                    if (runOutcome != null && runOutcome.HasValue)
                    {
                        isTestRunOutcomeFailed = runOutcome.Value;
                    }
                }

                StoreTestRunSummaryInEnvVar(testRunSummary);
            }
            catch (Exception ex) when(!(ex is OperationCanceledException && _executionContext.CancellationToken.IsCancellationRequested))
            {
                // Not catching all OperationCanceledExceptions, since pipeline cancellation should cancel the command as well.
                // Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
            return isTestRunOutcomeFailed;
        }
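
The catch filter on this overload is deliberately narrower than a plain catch: it only swallows exceptions that are not an OperationCanceledException tied to the pipeline's own cancellation, so cancelling the build still stops the command while ordinary publish failures just log a warning. A minimal sketch of that filter pattern:

using System;
using System.Threading;
using System.Threading.Tasks;

static class CancellationFilterSketch
{
    // Swallow ordinary publish failures with a warning, but let a cancellation that was
    // requested through the caller's token propagate, as the catch filter above does.
    public static async Task PublishSafelyAsync(Func<Task> publish, CancellationToken token)
    {
        try
        {
            await publish();
        }
        catch (Exception ex) when (!(ex is OperationCanceledException && token.IsCancellationRequested))
        {
            Console.WriteLine("Warning: publishing test results failed, not failing the task: " + ex.Message);
        }
    }

    public static async Task Main()
    {
        using (var cts = new CancellationTokenSource())
        {
            await PublishSafelyAsync(async () =>
            {
                await Task.Yield();
                throw new InvalidOperationException("simulated publish failure");
            }, cts.Token);
        }
    }
}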
 /// <summary>
 /// Construct the TestRunManager
 /// </summary>
 public TestRunManager(ITestRunPublisher testRunPublisher, ITraceLogger logger)
 {
     _publisher = testRunPublisher;
     _logger    = logger;
 }
        /// <summary>
        /// Publish a separate test run for each result file that has results.
        /// </summary>
        private async Task PublishToNewTestRunPerTestResultFileAsync(List <string> resultFiles, ITestRunPublisher publisher, TestRunContext runContext, string resultReader, CancellationToken cancellationToken)
        {
            try
            {
                // Publish separate test run for each result file that has results.
                var publishTasks = resultFiles.Select(async resultFile =>
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    string runName = null;
                    if (!string.IsNullOrWhiteSpace(_runTitle))
                    {
                        runName = GetRunTitle();
                    }

                    _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                    TestRunData testRunData = publisher.ReadResultsFromFile(runContext, resultFile, runName);

                    cancellationToken.ThrowIfCancellationRequested();

                    if (testRunData != null && testRunData.Results != null && testRunData.Results.Length > 0)
                    {
                        TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);
                        await publisher.AddResultsAsync(testRun, testRunData.Results, _executionContext.CancellationToken);
                        await publisher.EndTestRunAsync(testRunData, testRun.Id, cancellationToken: _executionContext.CancellationToken);
                    }
                    else
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                    }
                });
                await Task.WhenAll(publishTasks);
            }
            catch (Exception ex) when(!(ex is OperationCanceledException))
            {
                //Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
        }
 /// <summary>
 /// Construct the TestRunManager
 /// </summary>
 public TestRunManager(ITestRunPublisher testRunPublisher, ITraceLogger logger, ITelemetryDataCollector telemetry)
 {
     _publisher = testRunPublisher;
     _logger    = logger;
     _telemetry = telemetry;
 }
        /// <summary>
        /// Publish a single test run
        /// </summary>
        private async Task PublishAllTestResultsToSingleTestRunAsync(List <string> resultFiles, ITestRunPublisher publisher, int buildId, TestRunContext runContext, string resultReader, CancellationToken cancellationToken)
        {
            try
            {
                DateTime                  startTime             = DateTime.Now; //use local time since TestRunData defaults to local times
                TimeSpan                  totalTestCaseDuration = TimeSpan.Zero;
                List <string>             runAttachments        = new List <string>();
                List <TestCaseResultData> runResults            = new List <TestCaseResultData>();

                //read results from each file
                foreach (string resultFile in resultFiles)
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    //test case results
                    _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                    TestRunData resultFileRunData = publisher.ReadResultsFromFile(runContext, resultFile);

                    if (resultFileRunData != null && resultFileRunData.Results != null && resultFileRunData.Results.Length > 0)
                    {
                        foreach (TestCaseResultData tcResult in resultFileRunData.Results)
                        {
                            int durationInMs = Convert.ToInt32(tcResult.DurationInMs);
                            totalTestCaseDuration = totalTestCaseDuration.Add(TimeSpan.FromMilliseconds(durationInMs));
                        }
                        runResults.AddRange(resultFileRunData.Results);

                        //run attachments
                        if (resultFileRunData.Attachments != null)
                        {
                            runAttachments.AddRange(resultFileRunData.Attachments);
                        }
                    }
                    else
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                    }
                }

                string runName = string.IsNullOrWhiteSpace(_runTitle)
                    ? StringUtil.Format("{0}_TestResults_{1}", _testRunner, buildId)
                    : _runTitle;

                // create test run
                TestRunData testRunData = new TestRunData(
                    name: runName,
                    startedDate: startTime.ToString("o"),
                    completedDate: startTime.Add(totalTestCaseDuration).ToString("o"),
                    state: "InProgress",
                    isAutomated: true,
                    buildId: runContext != null ? runContext.BuildId : 0,
                    buildFlavor: runContext != null ? runContext.Configuration : string.Empty,
                    buildPlatform: runContext != null ? runContext.Platform : string.Empty
                    );

                testRunData.Attachments = runAttachments.ToArray();

                //publish run if there are results.
                if (runResults.Count > 0)
                {
                    TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);

                    await publisher.AddResultsAsync(testRun, runResults.ToArray(), _executionContext.CancellationToken);

                    await publisher.EndTestRunAsync(testRunData, testRun.Id, true, _executionContext.CancellationToken);
                }
            }
            catch (Exception ex) when(!(ex is OperationCanceledException))
            {
                //Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
        }
Example #18
 /// <summary>
 /// Construct the TestRunManager
 /// </summary>
 /// <param name="testRunPublisher"></param>
 public TestRunManager(ITestRunPublisher testRunPublisher, IDiagnosticDataCollector diagnosticDataCollector, ITelemetryDataCollector telemetryDataCollector)
 {
     this.publisher = testRunPublisher;
     this.diagnosticDataCollector = diagnosticDataCollector;
     this.telemetryDataCollector  = telemetryDataCollector;
 }
        /// <summary>
        /// Publish a separate test run for each result file that has results.
        /// </summary>
        private async Task PublishToNewTestRunPerTestResultFileAsync(List<string> resultFiles, ITestRunPublisher publisher, TestRunContext runContext, string resultReader, CancellationToken cancellationToken)
        {
            try
            {
                // Publish separate test run for each result file that has results.
                var publishTasks = resultFiles.Select(async resultFile =>
                {
                    cancellationToken.ThrowIfCancellationRequested();
                    string runName = null;
                    if (!string.IsNullOrWhiteSpace(_runTitle))
                    {
                        runName = GetRunTitle();
                    }

                    _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                    TestRunData testRunData = publisher.ReadResultsFromFile(runContext, resultFile, runName);

                    cancellationToken.ThrowIfCancellationRequested();

                    if (testRunData != null && testRunData.Results != null && testRunData.Results.Length > 0)
                    {
                        TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);
                        await publisher.AddResultsAsync(testRun, testRunData.Results, _executionContext.CancellationToken);
                        await publisher.EndTestRunAsync(testRunData, testRun.Id, cancellationToken: _executionContext.CancellationToken);
                    }
                    else
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                    }
                });
                await Task.WhenAll(publishTasks);
            }
            catch (Exception ex) when (!(ex is OperationCanceledException))
            {
                //Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
        }
Example #20
        /// <summary>
        /// Publish a separate test run for each result file that has results.
        /// </summary>
        private async Task <bool> PublishToNewTestRunPerTestResultFileAsync(List <string> resultFiles,
                                                                            ITestRunPublisher publisher,
                                                                            TestRunContext runContext,
                                                                            string resultReader,
                                                                            string runTitle,
                                                                            int batchSize,
                                                                            CancellationToken cancellationToken)
        {
            bool isTestRunOutcomeFailed = false;

            try
            {
                var groupedFiles = resultFiles
                                   .Select((resultFile, index) => new { Index = index, file = resultFile })
                                   .GroupBy(pair => pair.Index / batchSize)
                                   .Select(bucket => bucket.Select(pair => pair.file).ToList())
                                   .ToList();

                bool           changeTestRunTitle = resultFiles.Count > 1;
                TestRunSummary testRunSummary     = new TestRunSummary();

                foreach (var files in groupedFiles)
                {
                    // Publish separate test run for each result file that has results.
                    var publishTasks = files.Select(async resultFile =>
                    {
                        cancellationToken.ThrowIfCancellationRequested();

                        string runName = runTitle;
                        if (!string.IsNullOrWhiteSpace(runTitle) && changeTestRunTitle)
                        {
                            runName = GetRunName(runTitle);
                        }

                        _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
                        TestRunData testRunData       = publisher.ReadResultsFromFile(runContext, resultFile, runName);
                        testRunData.PipelineReference = runContext.PipelineReference;

                        isTestRunOutcomeFailed = isTestRunOutcomeFailed || GetTestRunOutcome(testRunData, testRunSummary);

                        cancellationToken.ThrowIfCancellationRequested();

                        if (testRunData != null)
                        {
                            if (testRunData.Results != null && testRunData.Results.Length > 0)
                            {
                                testRunData.AddCustomField(_testRunSystemCustomFieldName, runContext.TestRunSystem);
                                AddTargetBranchInfoToRunCreateModel(testRunData, runContext.TargetBranchName);
                                TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);
                                await publisher.AddResultsAsync(testRun, testRunData.Results, _executionContext.CancellationToken);
                                await publisher.EndTestRunAsync(testRunData, testRun.Id, cancellationToken: _executionContext.CancellationToken);
                            }
                            else
                            {
                                _executionContext.Output(StringUtil.Loc("NoResultFound", resultFile));
                            }
                        }
                        else
                        {
                            _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
                        }
                    });
                    await Task.WhenAll(publishTasks);
                }

                StoreTestRunSummaryInEnvVar(testRunSummary);
            }
            catch (Exception ex) when(!(ex is OperationCanceledException))
            {
                //Do not fail the task.
                LogPublishTestResultsFailureWarning(ex);
            }
            return isTestRunOutcomeFailed;
        }
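
Across these per-file overloads the concurrency shape is the same: Select(async file => ...) starts one publish per file in the current batch and Task.WhenAll waits for the whole batch before the next one begins, so at most batchSize runs are in flight at a time. A self-contained sketch of that fan-out, with a hypothetical publishOneAsync delegate standing in for the read/start/add/end sequence:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

static class BatchPublishSketch
{
    // Fan out one batch at a time: every file in the current batch is published concurrently,
    // and Task.WhenAll gates the next batch, as the per-file publishing methods above do.
    public static async Task PublishInBatchesAsync(
        IEnumerable<List<string>> batches,
        Func<string, CancellationToken, Task> publishOneAsync,
        CancellationToken token)
    {
        foreach (var batch in batches)
        {
            var publishTasks = batch.Select(file => publishOneAsync(file, token));
            await Task.WhenAll(publishTasks);
        }
    }

    public static async Task Main()
    {
        var batches = new List<List<string>>
        {
            new List<string> { "a.trx", "b.trx" },
            new List<string> { "c.trx" }
        };

        await PublishInBatchesAsync(batches, async (file, token) =>
        {
            token.ThrowIfCancellationRequested();
            await Task.Delay(10, token); // simulate the publish round-trip
            Console.WriteLine("published " + file);
        }, CancellationToken.None);
    }
}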