Example #1
 public override void Execute(TestRunContext cx)
 {
     using (cx.CreateSystemDataCollector())
     {
         cx.Stopwatch.Start();
         Diagnostics.GlobalLogger.TraceInformation("some text");
         cx.Stopwatch.Stop();
     }
 }
Example #2
 public override void Execute(TestRunContext cx)
 {
     using (cx.CreateSystemDataCollector())
     {
         cx.Stopwatch.Start();
         Diagnostics.WriteRawLine("Diagnostics.WriteRawLine()");
         cx.Stopwatch.Stop();
     }
 }
Example #3
 public override void Execute(TestRunContext cx)
 {
     using (cx.CreateSystemDataCollector())
     {
         cx.Stopwatch.Start();
         Log.Info("some text (log4net)");
         cx.Stopwatch.Stop();
     }
 }
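Examples #1-#3 above (together with #21 and #23 below) all repeat the same scaffold: open a system data collector on the context, then time a single logging call with the context's stopwatch; only the logging framework changes. A minimal sketch of that shared shape, factored into a helper purely for illustration (the helper itself is an assumption, not part of any of the quoted projects):
 // Hypothetical helper: time one logging call inside a system-data-collector
 // scope on the given TestRunContext. Only members already shown above are used.
 static void TimeLogCall(TestRunContext cx, Action logCall)
 {
     using (cx.CreateSystemDataCollector())
     {
         cx.Stopwatch.Start();
         logCall();
         cx.Stopwatch.Stop();
     }
 }

 // Usage, mirroring Example #1:
 // TimeLogCall(cx, () => Diagnostics.GlobalLogger.TraceInformation("some text"));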
Example #4
 public Hooks(TestRunContext testRunContext)
 {
     _testRunContext = testRunContext;
 }
Example #5
 public BrowserDriver(BrowserSeleniumDriverFactory browserSeleniumDriverFactory, TestRunContext testRunContext)
 {
     _browserSeleniumDriverFactory = browserSeleniumDriverFactory;
     _testRunContext       = testRunContext;
     _currentWebDriverLazy = new Lazy <IWebDriver>(GetWebDriver);
 }
Example #6
 public DistributeSwagToAttendeesSteps(TestRunContext context)
 {
     _context          = context;
     _swagEmController = new SwagEmController(_context.MockAttendeeService.Object, _context.MockSwagService.Object);
 }
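The constructors above, like most of the remaining ones, simply accept a TestRunContext as a dependency and store it in a field. A hedged sketch of how such a class could be wired up with Microsoft.Extensions.DependencyInjection, which the ServiceProvider/CreateScope calls in later snippets suggest is the container in use; MyDriver is a hypothetical consumer, not one of the quoted classes:
 using Microsoft.Extensions.DependencyInjection;

 // Hypothetical consumer following the same store-the-context pattern as above.
 public sealed class MyDriver
 {
     private readonly TestRunContext _testRunContext;

     public MyDriver(TestRunContext testRunContext)
     {
         _testRunContext = testRunContext;
     }
 }

 // Registration and resolution, given an already-constructed testRunContext:
 // var services = new ServiceCollection();
 // services.AddSingleton(testRunContext);
 // services.AddTransient<MyDriver>();
 // using var provider = services.BuildServiceProvider();
 // var driver = provider.GetRequiredService<MyDriver>();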
Example #7
        private async Task RunMappedTest(TestCase testCase, DiscoveredTestData testData, TestRunContext testRunContext, StepBinder stepBinder, IFrameworkHandle frameworkHandle)
        {
            frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Starting test \"{testCase.DisplayName}\"");

            frameworkHandle.RecordStart(testCase);

            var executor = new StepsExecutor(stepBinder);

            // Deliberately resume on same context to try to avoid Visual Studio Test Explorer "bug" (?) that doesn't
            // always detect the end of the test run when multiple tests are run in parallel.
            var testResult = await executor
                             .Execute(testCase, testData, testRunContext, frameworkHandle)
                             .ConfigureAwait(true);

            // https://github.com/Microsoft/vstest/blob/master/src/Microsoft.TestPlatform.CrossPlatEngine/Adapter/TestExecutionRecorder.cs <- comments here seem to suggest that we need to call RecordEnd just before RecordResult
            frameworkHandle.RecordEnd(testCase, testResult.Outcome);
            frameworkHandle.RecordResult(testResult);

            frameworkHandle.SendMessage(TestMessageLevel.Informational, $"Finished test \"{testCase.DisplayName}\"");
        }
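One hedged variation on the adapter method above: if executor.Execute were to throw, RecordEnd and RecordResult would be skipped and Test Explorer could be left waiting for a result. A sketch that still reports an outcome on failure; the catch block is an assumed addition, not part of the original adapter:
            frameworkHandle.RecordStart(testCase);

            TestResult testResult;
            try
            {
                testResult = await executor
                             .Execute(testCase, testData, testRunContext, frameworkHandle)
                             .ConfigureAwait(true);
            }
            catch (Exception exception)
            {
                // Assumed fallback: surface the failure as a test result instead of losing it.
                testResult = new TestResult(testCase)
                {
                    Outcome      = TestOutcome.Failed,
                    ErrorMessage = exception.Message
                };
            }

            // Same ordering as above: RecordEnd immediately before RecordResult.
            frameworkHandle.RecordEnd(testCase, testResult.Outcome);
            frameworkHandle.RecordResult(testResult);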
Example #8
 public BrowserSeleniumDriverFactory(TestRunContext testRunContext, WebServerDriver webServerDriver)
 {
     _testRunContext  = testRunContext;
     _webServerDriver = webServerDriver;
 }
Example #9
        public static void Teardown(TestRunContext testRunContext)
        {
            var typedProvider = (ServiceProvider)testRunContext.ServiceProvider;

            typedProvider.Dispose();
        }
Example #10
 public WebDriverFactory(ConfigurationDriver configurationDriver, TestRunContext testRunContext)
 {
     _configurationDriver = configurationDriver;
     _testRunContext      = testRunContext;
 }
Example #11
 public LoginDriver(BrowserDriver browserDriver, TestRunContext testRunContext)
 {
     _browserDriver  = browserDriver;
     _testRunContext = testRunContext;
 }
Example #12
 public SwagEmPage(TestRunContext context, IWebDriver driver)
 {
     _context = context;
     _driver  = driver;
 }
Example #13
        private TestRunData GetTestRunData(string trxContents, TrxResultReader myReader = null, TestRunContext trContext = null)
        {
            _trxResultFile = "results.trx";
            File.WriteAllText(_trxResultFile, trxContents);
            var reader  = myReader ?? new TrxResultReader();
            var runData = reader.ReadResults(_ec.Object, _trxResultFile,
                                             trContext ?? new TestRunContext(null, null, null, 1, null, null, null));

            return(runData);
        }
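A hedged usage sketch for the helper above, passing an explicit TestRunContext instead of the null-filled default. The positional values mirror the ones used in the TestRunPublisherTests constructor further down, and the mapping to (owner, platform, configuration, buildId, buildUri, releaseUri, releaseEnvironmentUri) is inferred from that usage rather than stated anywhere in these snippets:
        // Hedged sketch: the TRX payload is a stand-in; build a real one as in Example #14.
        var trxContents = "<TestRun xmlns=\"http://microsoft.com/schemas/VisualStudio/TeamTest/2010\"></TestRun>";
        var context     = new TestRunContext("owner", "platform", "config", 1, "builduri", "releaseuri", "releaseenvuri");
        var runData     = GetTestRunData(trxContents, trContext: context);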
Example #14
        private TestRunData GetTestRunDataWithAttachments(int val, TrxResultReader myReader = null, TestRunContext trContext = null)
        {
            var trxContents = "<?xml version = \"1.0\" encoding = \"UTF-8\"?>" +
                              "<TestRun id = \"ee3d8b3b-1ac9-4a7e-abfa-3d3ed2008613\" name = \"kaadhina@KAADHINA1 2015-03-20 16:53:32\" runUser = \"FAREAST\\kaadhina\" xmlns =\"http://microsoft.com/schemas/VisualStudio/TeamTest/2010\"><Times creation = \"2015-03-20T16:53:32.3309380+05:30\" queuing = \"2015-03-20T16:53:32.3319381+05:30\" start = \"2015-03-20T16:53:32.3349628+05:30\" finish = \"2015-03-20T16:53:32.9232329+05:30\" />" +

                              "<TestDefinitions>" +
                              "<UnitTest name = \"TestMethod2\" storage = \"c:\\users\\kaadhina\\source\\repos\\projectx\\unittestproject4\\unittestproject4\\bin\\debug\\unittestproject4.dll\" priority = \"1\" id = \"f0d6b58f-dc08-9c0b-aab7-0a1411d4a346\"><Owners><Owner name = \"asdf2\" /></Owners><Execution id = \"48ec1e47-b9df-43b9-aef2-a2cc8742353d\" /><TestMethod codeBase = \"C:\\Users\\kaadhina\\Source\\Repos\\Projectx\\UnitTestProject4\\UnitTestProject4\\bin\\Debug\\UnitTestProject4.dll\" adapterTypeName = \"Microsoft.VisualStudio.TestTools.TestTypes.Unit.UnitTestAdapter\" className = \"UnitTestProject4.UnitTest1\" name = \"TestMethod2\" /></UnitTest>" +
                              "<WebTest name=\"PSD_Startseite\" storage=\"c:\\vsoagent\\a284d2cc\\vseqa1\\psd_startseite.webtest\" id=\"01da1a13-b160-4ee6-9d84-7a6dfe37b1d2\" persistedWebTest=\"7\"><TestCategory><TestCategoryItem TestCategory=\"PSD\" /></TestCategory><Execution id=\"eb421c16-4546-435a-9c24-0d2878ea76d4\" /></WebTest>" +
                              "</TestDefinitions>" +

                              "<TestSettings name=\"TestSettings1\" id=\"e9d264e9-30da-48df-aa95-c6b53f699464\"><Description>These are default test settings for a local test run.</Description>" +
                              "<Execution>" +
                              "<AgentRule name=\"LocalMachineDefaultRole\">" +
                              "<DataCollectors>" +
                              "<DataCollector uri=\"datacollector://microsoft/CodeCoverage/1.0\" assemblyQualifiedName=\"Microsoft.VisualStudio.TestTools.CodeCoverage.CoveragePlugIn, Microsoft.VisualStudio.QualityTools.Plugins.CodeCoverage, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a\" friendlyName=\"Code Coverage (Visual Studio 2010)\">" +
                              "<Configuration><CodeCoverage xmlns=\"\"><Regular>" +
                              "<CodeCoverageItem binaryFile=\"C:\\mstest.static.UnitTestProject3.dll\" pdbFile=\"C:\\mstest.static.UnitTestProject3.instr.pdb\" instrumentInPlace=\"true\" />" +
                              "</Regular></CodeCoverage></Configuration>" +
                              "</DataCollector>" +
                              "</DataCollectors>" +
                              "</AgentRule>" +
                              "</Execution>" +
                              "</TestSettings>" +


                              "{0}" +
                              "{1}" +

                              "<ResultSummary outcome=\"Failed\"><Counters total = \"2\" executed = \"2\" passed=\"1\" failed=\"1\" error=\"0\" timeout=\"0\" aborted=\"0\" inconclusive=\"0\" passedButRunAborted=\"0\" notRunnable=\"0\" notExecuted=\"0\" disconnected=\"0\" warning=\"0\" completed=\"0\" inProgress=\"0\" pending=\"0\" />" +

                              "{2}" +

                              "</ResultSummary>" +
                              "</TestRun>";

            var part0 = "<Results>" +
                        "<UnitTestResult executionId = \"48ec1e47-b9df-43b9-aef2-a2cc8742353d\" testId = \"f0d6b58f-dc08-9c0b-aab7-0a1411d4a346\" testName = \"TestMethod2\" computerName = \"KAADHINA1\" duration = \"00:00:00.0834563\" startTime = \"2015-03-20T16:53:32.3099353+05:30\" endTime = \"2015-03-20T16:53:32.3939623+05:30\" testType = \"13cdc9d9-ddb5-4fa4-a97d-d965ccfc6d4b\" outcome = \"Failed\" testListId = \"8c84fa94-04c1-424b-9868-57a2d4851a1d\" relativeResultsDirectory = \"48ec1e47-b9df-43b9-aef2-a2cc8742353d\" ><Output><ErrorInfo><Message>Assert.Fail failed.</Message><StackTrace>at UnitTestProject4.UnitTest1.TestMethod2() in C:\\Users\\kaadhina\\Source\\Repos\\Projectx\\UnitTestProject4\\UnitTestProject4\\UnitTest1.cs:line 21</StackTrace></ErrorInfo></Output>" +
                        "<ResultFiles><ResultFile path=\"DIGANR-DEV4\\x.txt\" /></ResultFiles>" +
                        "</UnitTestResult>" +

                        "<WebTestResult executionId=\"eb421c16-4546-435a-9c24-0d2878ea76d4\" testId=\"01da1a13-b160-4ee6-9d84-7a6dfe37b1d2\" testName=\"PSD_Startseite\" computerName=\"LAB-BUILDVNEXT\" duration=\"00:00:01.6887389\" startTime=\"2015-05-20T18:53:51.1063165+00:00\" endTime=\"2015-05-20T18:54:03.9160742+00:00\" testType=\"4e7599fa-5ecb-43e9-a887-cd63cf72d207\" outcome=\"Passed\" testListId=\"8c84fa94-04c1-424b-9868-57a2d4851a1d\" relativeResultsDirectory=\"eb421c16-4546-435a-9c24-0d2878ea76d4\">" +
                        "<ResultFiles>" +
                        "<ResultFile path=\"PSD_Startseite.webtestResult\" />" +
                        "</ResultFiles>" +
                        "<WebTestResultFilePath>LOCAL SERVICE_LAB-BUILDVNEXT 2015-05-20 18_53_41\\In\\eb421c16-4546-435a-9c24-0d2878ea76d4\\PSD_Startseite.webtestResult</WebTestResultFilePath>" +
                        "</WebTestResult>" +
                        "</Results>";

            var part1 =
                "<CollectorDataEntries>" +
                "<Collector agentName=\"DIGANR-DEV4\" uri=\"datacollector://microsoft/CodeCoverage/2.0\" collectorDisplayName=\"Code Coverage\"><UriAttachments><UriAttachment>" +
                "<A href=\"DIGANR-DEV4\\vstest_console.dynamic.data.coverage\"></A></UriAttachment></UriAttachments>" +
                "</Collector>" +
                "<Collector agentName=\"DIGANR-DEV4\" uri=\"datacollector://microsoft/CodeCoverage/1.0\" collectorDisplayName=\"MSTestAdapter\"><UriAttachments>" +
                "<UriAttachment><A href=\"DIGANR-DEV4\\unittestproject3.dll\">c:\\vstest.static.unittestproject3.dll</A></UriAttachment>" +
                "<UriAttachment><A href=\"DIGANR-DEV4\\UnitTestProject3.instr.pdb\">C:\\vstest.static.UnitTestProject3.instr.pdb</A></UriAttachment>" +
                "</UriAttachments></Collector>" +
                "</CollectorDataEntries>";

            var part2 = "<ResultFiles>" +
                        "<ResultFile path=\"vstest_console.static.data.coverage\" /></ResultFiles>" +
                        "<ResultFile path=\"DIGANR-DEV4\\mstest.static.data.coverage\" />";

            switch (val)
            {
            case 0:
                trxContents = string.Format(trxContents, part0, string.Empty, string.Empty);
                break;

            case 1:
                trxContents = string.Format(trxContents, string.Empty, part1, string.Empty);
                break;

            case 2:
                trxContents = string.Format(trxContents, string.Empty, string.Empty, part2);
                break;

            case 3:
                trxContents = string.Format(trxContents, string.Empty, string.Empty, string.Empty);
                break;

            default:
                trxContents = string.Format(trxContents, part0, part1, part2);
                break;
            }

            return(GetTestRunData(trxContents, myReader, trContext));
        }
Example #15
 public GetUsersSteps(TestRunContext testRunContext)
 {
     _testRunContext = testRunContext;
 }
Example #16
 public BrowserSeleniumDriverFactory(ConfigurationDriver configurationDriver, TestRunContext testRunContext)
 {
     _configurationDriver = configurationDriver;
     _testRunContext      = testRunContext;
 }
Example #17
 public BrowserSeleniumDriverFactory(TestRunContext testRunContext)
 {
     _testRunContext = testRunContext;
 }
Example #18
 public MessageTemplatesRegistry(TestRunContext testRunContext)
 {
     _testRunContext = testRunContext;
 }
Example #19
        public async Task <TestResult> Execute(TestCase testCase, DiscoveredTestData testData, TestRunContext testRunContext, IMessageLogger logger)
        {
            const double SmallestTimeRecognisedByTestRunnerInSeconds = 0.0005;

            var testResult = new TestResult(testCase);
            var startTicks = Stopwatch.GetTimestamp();

            try
            {
                (testRunContext.Logger as TestLogAccessor)?.SetCurrentTestResult(testResult);

                if (testData == null)
                {
                    throw new ArgumentNullException(nameof(testData));
                }

                var hasAnySteps = testData.Feature.Background.Steps
                                  .Concat(testData.Scenario.Steps)
                                  .Concat(testData.Rule?.Background?.Steps ?? Enumerable.Empty <IStep>())
                                  .Any();

                if (!hasAnySteps)
                {
                    MarkTestAsSkipped(testResult);
                }
                else
                {
                    Localisation.SetUICultureFromTag(testData.Feature.Tags);

                    using (var serviceScope = testRunContext.ServiceProvider.CreateScope())
                    {
                        // Before Scenario hooks should run here (see https://docs.cucumber.io/gherkin/reference/#background)
                        // > A Background is run before each scenario, but after any Before hooks. In your feature file, put the Background before the first Scenario.

                        IEnumerable <IStep> allScenarioSteps = testData.Feature.Background.Steps;

                        if (testData.Rule != null)
                        {
                            allScenarioSteps = allScenarioSteps.Concat(testData.Rule.Background.Steps);
                        }

                        allScenarioSteps = allScenarioSteps.Concat(testData.Scenario.Steps);

                        await ExecuteSteps(serviceScope.ServiceProvider, testResult, allScenarioSteps, testData, testRunContext)
                        .ConfigureAwait(false);
                    }

                    testResult.Outcome = TestOutcome.Passed;
                }

                testResult.Duration = TimeSpan.FromSeconds(
                    Math.Max(
                        SmallestTimeRecognisedByTestRunnerInSeconds,
                        (Stopwatch.GetTimestamp() - startTicks) / (double)Stopwatch.Frequency));
            }
            catch (Exception exception)
            {
                MarkTestAsFailed(testCase, testResult, exception, logger);
            }

            return(testResult);
        }
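A note on the duration arithmetic above: Stopwatch.GetTimestamp() returns high-resolution ticks and Stopwatch.Frequency is ticks per second, so the difference has to be divided as a floating-point value to keep sub-second precision. The same computation pulled into a small helper, purely for illustration:
        // Illustrative helper (not part of the original class): convert a saved
        // Stopwatch.GetTimestamp() value into elapsed seconds with sub-second precision.
        private static double ElapsedSeconds(long startTicks)
        {
            return (Stopwatch.GetTimestamp() - startTicks) / (double)Stopwatch.Frequency;
        }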
Example #20
 public AppiumDriver(TestRunContext testRunContext)
 {
     _testRunContext = testRunContext;
 }
Example #21
 public override void Execute(TestRunContext cx)
 {
     using (cx.CreateSystemDataCollector())
     {
         cx.Stopwatch.Start();
         Logger.Trace("some text (NLog)");
         cx.Stopwatch.Stop();
     }
 }
Example #22
        public TestRunPublisherTests()
        {
            _attachmentFilePath = "attachment.txt";

            File.WriteAllText(_attachmentFilePath, "asdf");
            _testRunContext = new TestRunContext("owner", "platform", "config", 1, "builduri", "releaseuri", "releaseenvuri");

            _reader = new Mock <IResultReader>();
            _reader.Setup(x => x.ReadResults(It.IsAny <IExecutionContext>(), It.IsAny <string>(), It.IsAny <TestRunContext>()))
            .Callback <IExecutionContext, string, TestRunContext>
                ((executionContext, filePath, runContext) =>
            {
                _runContext      = runContext;
                _resultsFilepath = filePath;
            })
            .Returns((IExecutionContext executionContext, string filePath, TestRunContext runContext) =>
            {
                TestRunData trd = new TestRunData(
                    name: "xyz",
                    buildId: runContext.BuildId,
                    completedDate: "",
                    state: "InProgress",
                    isAutomated: true,
                    dueDate: "",
                    type: "",
                    buildFlavor: runContext.Configuration,
                    buildPlatform: runContext.Platform,
                    releaseUri: runContext.ReleaseUri,
                    releaseEnvironmentUri: runContext.ReleaseEnvironmentUri
                    );
                trd.Attachments = new string[] { "attachment.txt" };
                return(trd);
            });

            _testResultServer = new Mock <ITestResultsServer>();
            _testResultServer.Setup(x => x.InitializeServer(It.IsAny <VssConnection>()));
            _testResultServer.Setup(x => x.AddTestResultsToTestRunAsync(It.IsAny <TestCaseResult[]>(), It.IsAny <string>(), It.IsAny <int>(), It.IsAny <CancellationToken>()))
            .Callback <TestCaseResult[], string, int, CancellationToken>
                ((currentBatch, projectName, testRunId, cancellationToken) =>
            {
                _batchSizes.Add(currentBatch.Length);
                _resultCreateModels = currentBatch;
            })
            .Returns(() =>
            {
                List <TestCaseResult> resultsList = new List <TestCaseResult>();
                int i = 0;
                foreach (TestCaseResult resultCreateModel in _resultCreateModels)
                {
                    resultsList.Add(new TestCaseResult()
                    {
                        Id = ++i
                    });
                }
                return(Task.FromResult(resultsList));
            });

            _testResultServer.Setup(x => x.CreateTestRunAsync(It.IsAny <string>(), It.IsAny <RunCreateModel>(), It.IsAny <CancellationToken>()))
            .Callback <string, RunCreateModel, CancellationToken>
                ((projectName, testRunData, cancellationToken) =>
            {
                _projectId = projectName;
                _testRun   = (TestRunData)testRunData;
            })
            .Returns(Task.FromResult(new TestRun()
            {
                Name = "TestRun", Id = 1
            }));

            _testResultServer.Setup(x => x.UpdateTestRunAsync(It.IsAny <string>(), It.IsAny <int>(), It.IsAny <RunUpdateModel>(), It.IsAny <CancellationToken>()))
            .Callback <string, int, RunUpdateModel, CancellationToken>
                ((projectName, testRunId, updateModel, cancellationToken) =>
            {
                _runId            = testRunId;
                _projectId        = projectName;
                _updateProperties = updateModel;
            })
            .Returns(Task.FromResult(new TestRun()
            {
                Name = "TestRun", Id = 1
            }));

            _testResultServer.Setup(x => x.CreateTestRunAttachmentAsync(
                                        It.IsAny <TestAttachmentRequestModel>(), It.IsAny <string>(), It.IsAny <int>(), It.IsAny <CancellationToken>()))
            .Callback <TestAttachmentRequestModel, string, int, CancellationToken>
                ((reqModel, projectName, testRunId, cancellationToken) =>
            {
                _attachmentRequestModel = reqModel;
                _projectId = projectName;
                _runId     = testRunId;
            })
            .Returns(Task.FromResult(new TestAttachmentReference()));

            _testResultServer.Setup(x => x.CreateTestResultAttachmentAsync(It.IsAny <TestAttachmentRequestModel>(), It.IsAny <string>(), It.IsAny <int>(), It.IsAny <int>(), It.IsAny <CancellationToken>()))
            .Callback <TestAttachmentRequestModel, string, int, int, CancellationToken>
                ((reqModel, projectName, testRunId, testCaseResultId, cancellationToken) =>
            {
                if (_resultsLevelAttachments.ContainsKey(testCaseResultId))
                {
                    _resultsLevelAttachments[testCaseResultId].Add(reqModel);
                }
                else
                {
                    _resultsLevelAttachments.Add(testCaseResultId, new List <TestAttachmentRequestModel>()
                    {
                        reqModel
                    });
                }
            })
            .Returns(Task.FromResult(new TestAttachmentReference()));
        }
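The constructor above leans on one Moq idiom throughout: Setup selects the member, Callback captures whatever arguments the code under test passes in, and Returns supplies the canned response. A stripped-down sketch of that idiom for the result reader, using only types already shown; the capturedContext variable and the null return value are illustrative:
            // Hedged sketch of the capture pattern used above.
            TestRunContext capturedContext = null;
            var reader = new Mock<IResultReader>();
            reader.Setup(x => x.ReadResults(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<TestRunContext>()))
                  .Callback<IExecutionContext, string, TestRunContext>((executionContext, filePath, runContext) => capturedContext = runContext)
                  .Returns((TestRunData)null);   // or build a TestRunData as the original does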
Example #23
 public override void Execute(TestRunContext cx)
 {
     using (cx.CreateSystemDataCollector())
     {
         cx.Stopwatch.Start();
         Trace.WriteLine("Trace.WriteLine()");
         cx.Stopwatch.Stop();
     }
 }
Example #24
 public SwagEmSteps(TestRunContext context, SwagEmPage swagEmPage)
 {
     _context    = context;
     _swagEmPage = swagEmPage;
 }
Example #25
        public void PublishBasicCTestResults()
        {
            SetupMocks();
            string cTestResultsToBeRead = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                                          + "<Site BuildName=\"(empty)\" BuildStamp=\"20180515-1731-Experimental\" Name=\"(empty)\" Generator=\"ctest-3.11.0\" "
                                          + "CompilerName=\"\" CompilerVersion=\"\" OSName=\"Linux\" Hostname=\"3tnavBuild\" OSRelease=\"4.4.0-116-generic\" "
                                          + "OSVersion=\"#140-Ubuntu SMP Mon Feb 12 21:23:04 UTC 2018\" OSPlatform=\"x86_64\" Is64Bits=\"1\">"
                                          + "<Testing>"
                                          + "<StartDateTime>May 15 10:31 PDT</StartDateTime>"
                                          + "<StartTestTime>1526405497</StartTestTime>"
                                          + "<TestList>"
                                          + "<Test>./libs/MgmtVisualization/tests/LoggingSinkRandomTests.loggingSinkRandomTest_CallLoggingManagerCallback</Test>"
                                          + "<Test>./tools/simulator/test/simulator.SimulatorTest.readEventFile_mediaDetectedEvent_oneSignalEmitted</Test>"
                                          + "</TestList>"
                                          + "<Test Status =\"passed\">"
                                          + "<Name>LoggingSinkRandomTests.loggingSinkRandomTest_CallLoggingManagerCallback</Name>"
                                          + "<Path>./libs/MgmtVisualization/tests</Path>"
                                          + "<FullName>./libs/MgmtVisualization/tests/LoggingSinkRandomTests.loggingSinkRandomTest_CallLoggingManagerCallback</FullName>"
                                          + "<FullCommandLine>D:/a/r1/a/libs/MgmtVisualization/tests/MgmtVisualizationResultsAPI \"--gtest_filter=LoggingSinkRandomTests.loggingSinkRandomTest_CallLoggingManagerCallback\"</FullCommandLine>"
                                          + "<Results>"
                                          + "<NamedMeasurement type =\"numeric/double\" name=\"Execution Time\">"
                                          + "<Value>0.074303</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type =\"numeric/double\" name=\"Processors\">"
                                          + "<Value>1</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type =\"text/string\" name=\"Completion Status\">"
                                          + "<Value>Completed</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type =\"text/string\" name=\"Command Line\">"
                                          + "<Value>/home/ctc/jenkins/workspace/build_TNAV-dev_Pull-Request/build/libs/MgmtVisualization/tests/MgmtVisualizationTestPublicAPI \"--gtest_filter=loggingSinkRandomTests.loggingSinkRandomTest_CallLoggingCallback\"</Value>"
                                          + "</NamedMeasurement>"
                                          + "<Measurement>"
                                          + "<Value>output : [----------] Global test environment set-up.</Value>"
                                          + "</Measurement>"
                                          + "</Results>"
                                          + "</Test>"
                                          + "<Test Status =\"notrun\">"
                                          + "<Name>simulator.SimulatorTest.readEventFile_mediaDetectedEvent_oneSignalEmitted</Name>"
                                          + "<Path>./tools/simulator/test</Path>"
                                          + "<FullName>./tools/simulator/test/simulator.SimulatorTest.readEventFile_mediaDetectedEvent_oneSignalEmitted</FullName>"
                                          + "<FullCommandLine></FullCommandLine>"
                                          + "<Results>"
                                          + "<NamedMeasurement type =\"numeric/double\" name=\"Processors\">"
                                          + "<Value>1</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type =\"text/string\" name=\"Completion Status\">"
                                          + "<Value>Disabled</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type =\"text/string\" name=\"Command Line\">"
                                          + "<Value></Value>"
                                          + "</NamedMeasurement>"
                                          + "<Measurement>"
                                          + "<Value>Disabled</Value>"
                                          + "</Measurement>"
                                          + "</Results>"
                                          + "</Test>"
                                          + "<Test Status=\"passed\">"
                                          + "<Name>test_cgreen_run_named_test</Name>"
                                          + "<Path>./tests</Path>"
                                          + "<FullName>./tests/test_cgreen_run_named_test</FullName>"
                                          + "<FullCommandLine>/var/lib/jenkins/workspace/Cgreen-thoni56/build/build-c/tests/test_cgreen_c &quot;integer_one_should_assert_true&quot;</FullCommandLine>"
                                          + "<Results>"
                                          + "<NamedMeasurement type=\"numeric/double\" name=\"Execution Time\"><Value>0.00615707</Value></NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Completion Status\"><Value>Completed</Value></NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Command Line\"><Value>/var/lib/jenkins/workspace/Cgreen-thoni56/build/build-c/tests/test_cgreen_c &quot;integer_one_should_assert_true&quot;</Value></NamedMeasurement>"
                                          + "<Measurement>"
                                          + "<Value>Running &quot;all_c_tests&quot; (136 tests)..."
                                          + "Completed &quot;assertion_tests&quot;: 1 pass, 0 failures, 0 exceptions in 0ms."
                                          + "Completed &quot;all_c_tests&quot;: 1 pass, 0 failures, 0 exceptions in 0ms."
                                          + "</Value>"
                                          + "</Measurement>"
                                          + "</Results>"
                                          + "</Test>"
                                          + "<Test Status=\"passed\">"
                                          + "<Name>runner_test_cgreen_c</Name>"
                                          + "<Path>./tests</Path>"
                                          + "<FullName>./tests/runner_test_cgreen_c</FullName>"
                                          + "<FullCommandLine>D:/a/r1/a/Cgreen-thoni56/build/build-c/tools/cgreen-runner &quot;-x&quot; &quot;TEST&quot; &quot;libcgreen_c_tests.so&quot;</FullCommandLine>"
                                          + "<Results>"
                                          + "<NamedMeasurement type=\"numeric/double\" name=\"Execution Time\"><Value>0.499399</Value></NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Completion Status\"><Value>Completed</Value></NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Command Line\"><Value>/var/lib/jenkins/workspace/Cgreen-thoni56/build/build-c/tools/cgreen-runner &quot;-x&quot; &quot;TEST&quot; &quot;libcgreen_c_tests.so&quot;</Value></NamedMeasurement>"
                                          + "<Measurement>"
                                          + "<Value>	CGREEN EXCEPTION: Too many assertions within a single test."
                                          + "</Value>"
                                          + "</Measurement>"
                                          + "</Results>"
                                          + "</Test>"
                                          + "<Test Status=\"failed\">"
                                          + "<Name>WGET-testU-MD5-fail</Name>"
                                          + "<Path>E_/foo/sources</Path>"
                                          + "<FullName>E_/foo/sources/WGET-testU-MD5-fail</FullName>"
                                          + "<FullCommandLine>E:\\Tools\\cmake\\cmake-2.8.11-rc4-win32-x86\\bin\\cmake.exe &quot;-DTEST_OUTPUT_DIR:PATH=E:/foo/build-vs2008-visual/_cmake/modules/testU_WGET&quot;"
                                          + "&quot;-P&quot; &quot;E:/foo/sources/modules/testU/WGET-testU-MD5-fail.cmake&quot;</FullCommandLine>"
                                          + "<Results>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Exit Code\">"
                                          + "<Value>Failed</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Exit Value\">"
                                          + "<Value>0</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"numeric/double\" name=\"Execution Time\">"
                                          + "<Value>0.0760078</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Completion Status\">"
                                          + "<Value>Completed</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Command Line\">"
                                          + "<Value>E:\\Tools\\cmake\\cmake-2.8.11-rc4-win32-x86\\bin\\cmake.exe &quot;-DTEST_OUTPUT_DIR:PATH=E:/foo/build-vs2008-visual/_cmake/modules/testU_WGET&quot;"
                                          + "&quot;-P&quot; &quot;E:/foo/sources/modules/testU/WGET-testU-MD5-fail.cmake&quot;</Value>"
                                          + "</NamedMeasurement>"
                                          + "<Measurement>"
                                          + "<Value>-- Download of file://\\abc-mang.md5.txt"
                                          + "failed with message: [37]&quot;couldn&apos;t read a file:// file&quot;"
                                          + "</Value>"
                                          + "</Measurement>"
                                          + "</Results>"
                                          + "</Test>"
                                          + "<Test Status=\"failed\">"
                                          + "<Name>WGET-testU-noMD5</Name>"
                                          + "<Path>E_/foo/sources</Path>"
                                          + "<FullName>E_/foo/sources/WGET-testU-noMD5</FullName>"
                                          + "<FullCommandLine>E:\\Tools\\cmake\\cmake-2.8.11-rc4-win32-x86\\bin\\cmake.exe &quot;-DTEST_OUTPUT_DIR:PATH=E:/foo/build-vs2008-visual/_cmake/modules/testU_WGET&quot;"
                                          + "&quot;-P&quot; &quot;E:/foo/sources/modules/testU/WGET-testU-noMD5.cmake&quot;</FullCommandLine>"
                                          + "<Results>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Exit Code\">"
                                          + "<Value>Failed</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Exit Value\">"
                                          + "<Value>1</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"numeric/double\" name=\"Execution Time\">"
                                          + "<Value>0.0820084</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Completion Status\">"
                                          + "<Value>Completed</Value>"
                                          + "</NamedMeasurement>"
                                          + "<NamedMeasurement type=\"text/string\" name=\"Command Line\">"
                                          + "<Value>E:\\Tools\\cmake\\cmake-2.8.11-rc4-win32-x86\\bin\\cmake.exe &quot;-DTEST_OUTPUT_DIR:PATH=E:/foo/build-vs2008-visual/_cmake/modules/testU_WGET&quot;"
                                          + "&quot;-P&quot; &quot;E:/foo/sources/modules/testU/WGET-testU-noMD5.cmake&quot;</Value>"
                                          + "</NamedMeasurement>"
                                          + "<Measurement>"
                                          + "<Value>-- Download of file://\\abc-mang.md5.txt"
                                          + "failed with message: [37]&quot;couldn&apos;t read a file:// file&quot;"
                                          + "CMake Error at modules/Logging.cmake:121 (message):"
                                          + ""
                                          + ""
                                          + "test BAR_wget_file succeed: result is &quot;OFF&quot; instead of &quot;ON&quot;"
                                          + ""
                                          + "Call Stack (most recent call first):"
                                          + "modules/Test.cmake:74 (BAR_msg_fatal)"
                                          + "modules/testU/WGET-testU-noMD5.cmake:14 (BAR_check_equal)"
                                          + ""
                                          + ""
                                          + "</Value>"
                                          + "</Measurement>"
                                          + "</Results>"
                                          + "</Test>"
                                          + "<EndDateTime>May 15 10:37 PDT</EndDateTime>"
                                          + "<EndTestTime>1526405879</EndTestTime>"
                                          + "<ElapsedMinutes>6</ElapsedMinutes>"
                                          + "</Testing>"
                                          + "</Site>";

            _resultFile = "xunitresults.xml";
            File.WriteAllText(_resultFile, cTestResultsToBeRead);
            CTestParser      reader          = new CTestParser();
            TestRunContext   runContext      = new TestRunContext();
            TestDataProvider runDataProvider = reader.ParseTestResultFiles(_ec.Object, runContext, new List <string> {
                _resultFile
            });
            List <TestRunData> runData = runDataProvider.GetTestRunData();

            Assert.NotNull(runData[0].TestResults);
            Assert.Equal(6, runData[0].TestResults.Count);
            Assert.Equal(3, runData[0].TestResults.Count(r => r.Outcome.Equals("Passed")));
            Assert.Equal(2, runData[0].TestResults.Count(r => r.Outcome.Equals("Failed")));
            Assert.Equal(1, runData[0].TestResults.Count(r => r.Outcome.Equals("NotExecuted")));
            Assert.Equal("CTest Test Run  ", runData[0].RunCreateModel.Name);
            Assert.Equal("Completed", runData[0].TestResults[0].State);
            Assert.Equal("./libs/MgmtVisualization/tests/LoggingSinkRandomTests.loggingSinkRandomTest_CallLoggingManagerCallback", runData[0].TestResults[0].AutomatedTestName);
            Assert.Equal("./libs/MgmtVisualization/tests", runData[0].TestResults[0].AutomatedTestStorage);
            Assert.Equal("LoggingSinkRandomTests.loggingSinkRandomTest_CallLoggingManagerCallback", runData[0].TestResults[0].TestCaseTitle);
            Assert.Equal(null, runData[0].TestResults[0].AutomatedTestId);
            Assert.Equal(null, runData[0].TestResults[0].AutomatedTestTypeId);
        }
Example #26
        public async Task Execute(IStepBinding stepBinding, IServiceProvider serviceProvider, Collection <TestResultMessage> messages, TestRunContext testRunContext)
        {
            var attempts = 0;

            do
            {
                attempts++;

                await stepBinding
                .Execute(serviceProvider, messages)
                .ConfigureAwait(false);

                if (attempts >= testRunContext.EventualSuccess.MaximumAttempts)
                {
                    break;
                }

                messages.Add(
                    new TestResultMessage(
                        TestResultMessage.StandardOutCategory,
                        $"{StepsExecutor.StepLogIndent}Passed at {DateTime.UtcNow:o}, waiting and checking again{Environment.NewLine}"));

                await Task
                .Delay(testRunContext.EventualSuccess.DelayBetweenAttempts)
                .ConfigureAwait(false);
            } while (true);
        }
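The retry loop above only assumes that TestRunContext exposes an EventualSuccess object with MaximumAttempts and DelayBetweenAttempts. A guess at what such a settings type might look like, for illustration only; the property names come from the usage above, while the class name and defaults are assumptions:
        // Hypothetical settings shape consumed by the loop above.
        public sealed class EventualSuccessSettings
        {
            public int MaximumAttempts { get; set; } = 3;

            public TimeSpan DelayBetweenAttempts { get; set; } = TimeSpan.FromSeconds(1);
        }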