Example #1
		private string PointToEllipseStr (double x, double y, string colour, TestRunData run_data, string line, int count)
		{
			return String.Format ("<Ellipse Fill=\"{0}\" Width=\"5\" Height=\"5\" Canvas.Left=\"{1}\" Canvas.Top=\"{2}\" " +
					"MouseEnter=\"EllipseMouseEnter\" MouseLeave=\"EllipseMouseLeave\" Tag=\"{3}\" />", colour, x - 2.5, y - 2.5,
					TagForEllipse (run_data, line, count));
		}
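As a worked illustration (argument values are hypothetical), a call such as PointToEllipseStr (100, 50, "Green", run, "42", 3) yields markup of the form below; the Tag value comes from TagForEllipse (Example #2):

		<Ellipse Fill="Green" Width="5" Height="5" Canvas.Left="97.5" Canvas.Top="47.5" MouseEnter="EllipseMouseEnter" MouseLeave="EllipseMouseLeave" Tag="..." />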
Example #2
		private string TagForEllipse (TestRunData run_data, string line, int count)
		{
			return String.Format ("{0}${1}${2}${3}${4}", run_data.StartTime, line, count, run_data.FailingIds, run_data.PassingIds);
		}
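The tag is a '$'-separated record, so a counterpart that takes it apart again is a one-liner. The helper below is hypothetical (it is not part of the original source) and assumes none of the individual fields contain a '$' character:

		// Hypothetical helper: splits a tag built by TagForEllipse back into
		// [start time, line, count, failing ids, passing ids].
		// Assumes none of the fields themselves contain '$'.
		private string[] ParseEllipseTag (string tag)
		{
			return tag.Split ('$');
		}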
Example #3
		private void LoadRunData (string dir)
		{
			string run_file = Path.Combine (dir, XmlReport.TestRunFileName);

			if (!File.Exists (run_file))
				return;

			XPathDocument doc = new XPathDocument (run_file);
			XPathNavigator nav = doc.CreateNavigator ();
			XPathExpression expr = nav.Compile ("/TestRun");
			XPathNodeIterator iterator = nav.Select (expr);

			if (!iterator.MoveNext ())
				return;

			TestRunData test_run = new TestRunData ();
			if (!DateTime.TryParseExact (GetBaseDirectory (dir), "yyyy-MM-dd-HH-mm-ss", System.Globalization.CultureInfo.InvariantCulture, System.Globalization.DateTimeStyles.AssumeLocal, out test_run.StartTime))
				test_run.StartTime = DateTime.ParseExact (GetBaseDirectory (dir), "yyyy-MM-dd-hh-mm", System.Globalization.CultureInfo.InvariantCulture);
			test_run.NumTestsExecuted = Int32.Parse (iterator.Current.GetAttribute ("ExecutedTests", String.Empty));
			test_run.NumTestsPassed = Int32.Parse (iterator.Current.GetAttribute ("PassedTests", String.Empty));
			test_run.NumTestsFailed = Int32.Parse (iterator.Current.GetAttribute ("FailedTests", String.Empty));
			test_run.NumTestsIgnored = Int32.Parse (iterator.Current.GetAttribute ("IgnoredTests", String.Empty));
			test_run.NumTestsKnownFailure = Int32.Parse (iterator.Current.GetAttribute ("KnownFailures", String.Empty));

			test_run.FailingIds = GetFailingIds (run_file);
			test_run.PassingIds = GetPassingIds (run_file);

			test_runs.Add (test_run);
		}
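For orientation, here is a minimal sketch of the TestRunData shape these Moonlight snippets assume. The members are inferred from the usage above; the real class may declare more, and the types of the id lists in particular are an assumption:

		// Sketch only: members inferred from LoadRunData and TagForEllipse above.
		class TestRunData
		{
			public DateTime StartTime;
			public int NumTestsExecuted;
			public int NumTestsPassed;
			public int NumTestsFailed;
			public int NumTestsIgnored;
			public int NumTestsKnownFailure;
			public string FailingIds;	// only ever formatted into a string above, so the type is assumed
			public string PassingIds;
		}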
Example #4
		//
		// Build a list of test runs that were not partial runs
		//
		private void BuildCompleteTestRunsList ()
		{
			int min = (int) (this_run.ExecutedTests.Count * 0.85);
			complete_test_runs = new List<TestRunData> ();

			foreach (TestRunData run in test_runs) {
				if (run.NumTestsExecuted < min)
					continue;
				complete_test_runs.Add (run);
			}

			TestRunData t = new TestRunData ();
			t.StartTime = this_run.StartTime;
			t.NumTestsExecuted = this_run.ExecutedTests.Count;
			t.NumTestsPassed = this_run.PassedTests.Count;
			t.NumTestsFailed = this_run.FailedTests.Count;
			t.NumTestsIgnored = this_run.IgnoredTests.Count;
			t.NumTestsKnownFailure = this_run.KnownFailures.Count;

			complete_test_runs.Add (t);
		}
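The loop keeps only prior runs that executed at least 85% as many tests as the current run. Purely for illustration, the same selection expressed with LINQ (requires a using System.Linq directive); the current run would still be appended afterwards as above:

			// Illustrative LINQ equivalent of the foreach filter above.
			complete_test_runs = test_runs
				.Where (run => run.NumTestsExecuted >= min)
				.ToList ();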
Example #5
File: Program.cs Project: rgani/roslyn
        private static async Task SendRunStats(Options options, IDataStorage dataStorage, TimeSpan elapsed, RunAllResult result, int partitionCount, CancellationToken cancellationToken)
        {
            var testRunData = new TestRunData()
            {
                Cache = dataStorage.Name,
                ElapsedSeconds = (int)elapsed.TotalSeconds,
                IsJenkins = Constants.IsJenkinsRun,
                Is32Bit = !options.Test64,
                AssemblyCount = options.Assemblies.Count,
                ChunkCount = partitionCount,
                CacheCount = result.CacheCount,
                Succeeded = result.Succeeded
            };

            var request = new RestRequest("api/testData/run", Method.POST);
            request.RequestFormat = DataFormat.Json;
            request.AddParameter("text/json", JsonConvert.SerializeObject(testRunData), ParameterType.RequestBody);

            try
            {
                var client = new RestClient(Constants.DashboardUriString);
                var response = await client.ExecuteTaskAsync(request);
                if (response.StatusCode != System.Net.HttpStatusCode.NoContent)
                {
                    Logger.Log($"Unable to send results: {response.ErrorMessage}");
                }
            }
            catch
            {
                Logger.Log("Unable to send results");
            }
        }
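A rough HttpClient-based equivalent of the RestSharp call above, shown only as a sketch; it assumes the dashboard accepts the same JSON body when the route is appended to Constants.DashboardUriString as below:

            // Sketch only: the same POST issued with HttpClient instead of RestSharp.
            var json = JsonConvert.SerializeObject(testRunData);
            using (var httpClient = new System.Net.Http.HttpClient())
            using (var content = new System.Net.Http.StringContent(json, System.Text.Encoding.UTF8, "text/json"))
            {
                var url = Constants.DashboardUriString.TrimEnd('/') + "/api/testData/run";
                var httpResponse = await httpClient.PostAsync(url, content);
                if (httpResponse.StatusCode != System.Net.HttpStatusCode.NoContent)
                {
                    Logger.Log($"Unable to send results: {httpResponse.ReasonPhrase}");
                }
            }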
        public TestRunPublisherTests()
        {
            _attachmentFilePath = "attachment.txt";

            File.WriteAllText(_attachmentFilePath, "asdf");
            _testRunContext = new TestRunContext("owner", "platform", "config", 1, "builduri", "releaseuri", "releaseenvuri");

            _reader = new Mock <IResultReader>();
            _reader.Setup(x => x.ReadResults(It.IsAny <IExecutionContext>(), It.IsAny <string>(), It.IsAny <TestRunContext>()))
            .Callback <IExecutionContext, string, TestRunContext>
                ((executionContext, filePath, runContext) =>
            {
                _runContext      = runContext;
                _resultsFilepath = filePath;
            })
            .Returns((IExecutionContext executionContext, string filePath, TestRunContext runContext) =>
            {
                TestRunData trd = new TestRunData(
                    name: "xyz",
                    buildId: runContext.BuildId,
                    completedDate: "",
                    state: "InProgress",
                    isAutomated: true,
                    dueDate: "",
                    type: "",
                    buildFlavor: runContext.Configuration,
                    buildPlatform: runContext.Platform,
                    releaseUri: runContext.ReleaseUri,
                    releaseEnvironmentUri: runContext.ReleaseEnvironmentUri
                    );
                trd.Attachments = new string[] { "attachment.txt" };
                return(trd);
            });

            _testResultServer = new Mock <ITestResultsServer>();
            _testResultServer.Setup(x => x.InitializeServer(It.IsAny <VssConnection>(), It.IsAny <IExecutionContext>()));
            _testResultServer.Setup(x => x.AddTestResultsToTestRunAsync(It.IsAny <TestCaseResult[]>(), It.IsAny <string>(), It.IsAny <int>(), It.IsAny <CancellationToken>()))
            .Callback <TestCaseResult[], string, int, CancellationToken>
                ((currentBatch, projectName, testRunId, cancellationToken) =>
            {
                _batchSizes.Add(currentBatch.Length);
                _resultCreateModels = currentBatch;
            })
            .Returns(() =>
            {
                List <TestCaseResult> resultsList = new List <TestCaseResult>();
                int i = 0;
                int j = 1;
                foreach (TestCaseResult resultCreateModel in _resultCreateModels)
                {
                    List <TestSubResult> SubResults = null;
                    if (resultCreateModel.SubResults != null)
                    {
                        SubResults = new List <TestSubResult>();
                        foreach (var subresultdata in resultCreateModel.SubResults)
                        {
                            var subResult = new TestSubResult();
                            subResult.Id  = j++;
                            SubResults.Add(subResult);
                        }
                    }
                    resultsList.Add(new TestCaseResult()
                    {
                        Id = ++i, SubResults = SubResults
                    });
                }
                return(Task.FromResult(resultsList));
            });

            _testResultServer.Setup(x => x.CreateTestRunAsync(It.IsAny <string>(), It.IsAny <RunCreateModel>(), It.IsAny <CancellationToken>()))
            .Callback <string, RunCreateModel, CancellationToken>
                ((projectName, testRunData, cancellationToken) =>
            {
                _projectId = projectName;
                _testRun   = (TestRunData)testRunData;
            })
            .Returns(Task.FromResult(new TestRun()
            {
                Name = "TestRun", Id = 1
            }));

            _testResultServer.Setup(x => x.UpdateTestRunAsync(It.IsAny <string>(), It.IsAny <int>(), It.IsAny <RunUpdateModel>(), It.IsAny <CancellationToken>()))
            .Callback <string, int, RunUpdateModel, CancellationToken>
                ((projectName, testRunId, updateModel, cancellationToken) =>
            {
                _runId            = testRunId;
                _projectId        = projectName;
                _updateProperties = updateModel;
            })
            .Returns(Task.FromResult(new TestRun()
            {
                Name = "TestRun", Id = 1
            }));

            _testResultServer.Setup(x => x.CreateTestRunAttachmentAsync(
                                        It.IsAny <TestAttachmentRequestModel>(), It.IsAny <string>(), It.IsAny <int>(), It.IsAny <CancellationToken>()))
            .Callback <TestAttachmentRequestModel, string, int, CancellationToken>
                ((reqModel, projectName, testRunId, cancellationToken) =>
            {
                _attachmentRequestModel = reqModel;
                _projectId = projectName;
                _runId     = testRunId;
            })
            .Returns(Task.FromResult(new TestAttachmentReference()));

            _testResultServer.Setup(x => x.CreateTestResultAttachmentAsync(It.IsAny <TestAttachmentRequestModel>(), It.IsAny <string>(), It.IsAny <int>(), It.IsAny <int>(), It.IsAny <CancellationToken>()))
            .Callback <TestAttachmentRequestModel, string, int, int, CancellationToken>
                ((reqModel, projectName, testRunId, testCaseResultId, cancellationToken) =>
            {
                if (_resultsLevelAttachments.ContainsKey(testCaseResultId))
                {
                    _resultsLevelAttachments[testCaseResultId].Add(reqModel);
                }
                else
                {
                    _resultsLevelAttachments.Add(testCaseResultId, new List <TestAttachmentRequestModel>()
                    {
                        reqModel
                    });
                }
            })
            .Returns(Task.FromResult(new TestAttachmentReference()));
            _testResultServer.Setup(x => x.CreateTestSubResultAttachmentAsync(It.IsAny <TestAttachmentRequestModel>(), It.IsAny <string>(), It.IsAny <int>(), It.IsAny <int>(), It.IsAny <int>(), It.IsAny <CancellationToken>()))
            .Callback <TestAttachmentRequestModel, string, int, int, int, CancellationToken>
                ((reqModel, projectName, testRunId, testCaseResultId, testSubResultId, cancellationToken) =>
            {
                if (_subResultsLevelAttachments.ContainsKey(testCaseResultId))
                {
                    if (_subResultsLevelAttachments[testCaseResultId].ContainsKey(testSubResultId))
                    {
                        _subResultsLevelAttachments[testCaseResultId][testSubResultId].Add(reqModel);
                    }
                    else
                    {
                        _subResultsLevelAttachments[testCaseResultId].Add(testSubResultId, new List <TestAttachmentRequestModel>()
                        {
                            reqModel
                        });
                    }
                }
                else
                {
                    Dictionary <int, List <TestAttachmentRequestModel> > subResultAtt = new Dictionary <int, List <TestAttachmentRequestModel> >();
                    subResultAtt.Add(testSubResultId, new List <TestAttachmentRequestModel>()
                    {
                        reqModel
                    });
                    _subResultsLevelAttachments.Add(testCaseResultId, subResultAtt);
                }
            })
            .Returns(Task.FromResult(new TestAttachmentReference()));
        }
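The constructor above only wires up mocks; the fields its callbacks write to are declared elsewhere in the test class. A sketch of those declarations, with types inferred from the assignments (the real test class may differ):

        // Sketch only: field types inferred from the callbacks above.
        private string _attachmentFilePath;
        private TestRunContext _testRunContext;
        private Mock<IResultReader> _reader;
        private Mock<ITestResultsServer> _testResultServer;
        private TestRunContext _runContext;
        private string _resultsFilepath;
        private List<int> _batchSizes = new List<int>();
        private TestCaseResult[] _resultCreateModels;
        private string _projectId;
        private int _runId;
        private TestRunData _testRun;
        private RunUpdateModel _updateProperties;
        private TestAttachmentRequestModel _attachmentRequestModel;
        private Dictionary<int, List<TestAttachmentRequestModel>> _resultsLevelAttachments = new Dictionary<int, List<TestAttachmentRequestModel>>();
        private Dictionary<int, Dictionary<int, List<TestAttachmentRequestModel>>> _subResultsLevelAttachments = new Dictionary<int, Dictionary<int, List<TestAttachmentRequestModel>>>();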
        public void ReadResultsSendsRunTitleToReader()
        {
            SetupMocks();
            _testRunData = _publisher.ReadResultsFromFile(_testRunContext, "filepath", "runName");
            Assert.Equal("runName", _runContext.RunName);
        }
Example #8
        public void ReadResultsReturnsCorrectValues()
        {
            SetupMocks();
            string xunitResults = "<?xml version=\"1.0\" encoding=\"utf-8\"?>" +
                                  "<assemblies>" +
                                  "<assembly name=\"C:/Users/kaadhina/Source/Workspaces/p1/ClassLibrary2/ClassLibrary2/bin/Debug/ClassLibrary2.DLL\" environment=\"64-bit .NET 4.0.30319.42000 [collection-per-class, parallel]\" test-framework=\"xUnit.net 2.0.0.2929\" run-date=\"2015-08-18\" run-time=\"06:17:15\" config-file=\"C:\\Users\\kaadhina\\Source\\Workspaces\\p1\\ClassLibrary2\\packages\\xunit.runner.console.2.0.0\\tools\\xunit.console.exe.Config\" total=\"2\" passed=\"1\" failed=\"1\" skipped=\"0\" time=\"0.233\" errors=\"0\">" +
                                  "<errors />" +
                                  "<collection total=\"2\" passed=\"1\" failed=\"1\" skipped=\"0\" name=\"Test collection for MyFirstUnitTests.Class1\" time=\"0.044\">" +
                                  "<test name=\"MyFirstUnitTests.Class1.FailingTest\" type=\"MyFirstUnitTests.Class1\" method=\"FailingTest\" time=\"1.0422319\" result=\"Fail\">" +
                                  "<failure exception-type=\"Xunit.Sdk.EqualException\" >" +
                                  "<message><![CDATA[Assert.Equal() Failure" +
                                  "Expected: 5" +
                                  "Actual: 4]]></message >" +
                                  "<stack-trace><![CDATA[at MyFirstUnitTests.Class1.FailingTest() in C: \\Users\\kaadhina\\Source\\Workspaces\\p1\\ClassLibrary2\\ClassLibrary2\\Class1.cs:line 17]]></stack-trace>" +
                                  "</failure >" +
                                  "</test>" +
                                  "<test name=\"MyFirstUnitTests.Class1.PassingTest\" type=\"MyFirstUnitTests.Class1\" method=\"PassingTest\" time=\"0.0014079\" result=\"Pass\">" +
                                  "<traits>" +
                                  "<trait name=\"priority\" value=\"0\" />" +
                                  "<trait name=\"owner\" value=\"asdf\" />" +
                                  "</traits>" +
                                  "</test>" +
                                  "</collection>" +
                                  "</assembly>" +
                                  "<assembly name=\"C:\\Users\\kaadhina\\Source\\Workspaces\\p1\\ClassLibrary2\\ClassLibrary1\\bin\\Debug\\ClassLibrary1.DLL\" environment=\"64-bit .NET 4.0.30319.42000 [collection-per-class, parallel]\" test-framework=\"xUnit.net 2.0.0.2929\" run-date=\"2015-08-18\" run-time=\"06:17:16\" config-file=\"C:\\Users\\kaadhina\\Source\\Workspaces\\p1\\ClassLibrary2\\packages\\xunit.runner.console.2.0.0\\tools\\xunit.console.exe.Config\" total=\"2\" passed=\"2\" failed=\"0\" skipped=\"0\" time=\"0.152\" errors=\"0\">" +
                                  "<errors />" +
                                  "<collection total=\"2\" passed=\"2\" failed=\"0\" skipped=\"0\" name=\"Test collection for MyFirstUnitTests.Class2\" time=\"0.006\">" +
                                  "<test name=\"MyFirstUnitTests.Class2.tset2\" type=\"MyFirstUnitTests.Class2\" method=\"tset2\" time=\"0.0056931\" result=\"Pass\" />" +
                                  "<test name=\"MyFirstUnitTests.Class2.test1\" type=\"MyFirstUnitTests.Class2\" method=\"test1\" time=\"0.0001093\" result=\"Pass\">" +
                                  "<traits>" +
                                  "<trait name=\"priority\" value=\"0\" />" +
                                  "</traits>" +
                                  "</test>" +
                                  "</collection>" +
                                  "</assembly>" +
                                  "</assemblies>";

            _xUnitResultFile = "XUnitResults.xml";
            File.WriteAllText(_xUnitResultFile, xunitResults);

            XUnitResultReader reader  = new XUnitResultReader();
            TestRunData       runData = reader.ReadResults(_ec.Object, _xUnitResultFile, new TestRunContext("Owner", "any cpu", "debug", 1, "", "releaseUri", "releaseEnvironmentUri"));

            Assert.Equal("XUnit Test Run debug any cpu", runData.Name);
            Assert.Equal(4, runData.Results.Length);
            Assert.Equal("debug", runData.BuildFlavor);
            Assert.Equal("any cpu", runData.BuildPlatform);
            Assert.Equal("1", runData.Build.Id);
            Assert.Equal(1, runData.Attachments.Length);

            Assert.Equal("Failed", runData.Results[0].Outcome);
            Assert.Equal("FailingTest", runData.Results[0].TestCaseTitle);
            Assert.Equal("MyFirstUnitTests.Class1.FailingTest", runData.Results[0].AutomatedTestName);
            Assert.Equal("Assert.Equal() FailureExpected: 5Actual: 4", runData.Results[0].ErrorMessage);
            Assert.Equal("at MyFirstUnitTests.Class1.FailingTest() in C: \\Users\\kaadhina\\Source\\Workspaces\\p1\\ClassLibrary2\\ClassLibrary2\\Class1.cs:line 17", runData.Results[0].StackTrace);
            Assert.Equal("Owner", runData.Results[0].RunBy.DisplayName);
            Assert.Equal("Completed", runData.Results[0].State);
            Assert.Equal("1042", runData.Results[0].DurationInMs.ToString());
            Assert.Equal("ClassLibrary2.DLL", runData.Results[0].AutomatedTestStorage);


            Assert.Equal("Passed", runData.Results[1].Outcome);
            Assert.Equal("0", runData.Results[1].Priority.ToString());
            Assert.Equal("asdf", runData.Results[1].Owner.DisplayName);

            Assert.Equal(null, runData.Results[0].AutomatedTestId);
            Assert.Equal(null, runData.Results[0].AutomatedTestTypeId);

            double runDuration = 0;

            foreach (TestCaseResultData result in runData.Results)
            {
                runDuration += result.DurationInMs;
            }

            DateTime startDate;

            DateTime.TryParse(runData.StartDate, out startDate);
            DateTime completeDate;

            DateTime.TryParse(runData.CompleteDate, out completeDate);
            TimeSpan duration = completeDate - startDate;

            Assert.Equal((completeDate - startDate).TotalMilliseconds, runDuration);

            Assert.Equal("releaseUri", runData.ReleaseUri);
            Assert.Equal("releaseEnvironmentUri", runData.ReleaseEnvironmentUri);
        }
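The "1042" duration assertion follows from the xUnit time attribute being expressed in seconds; a quick illustration of the expected conversion:

            // Illustrative arithmetic only: the first test's time attribute is 1.0422319 s,
            // so the reader is expected to report it as roughly 1042 ms.
            double seconds = 1.0422319;
            int durationInMs = (int)(seconds * 1000);   // 1042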
Example #9
        private void invokeTests(TestRunData result, Serializer serializer, Test test, MemoryStream targetStream)
        {
            const int ERROR_CUTOFF = 3;

            const int ABORT_CUTOFF = 16;

            var streamWrap = new NFX.IO.NonClosingStreamWrap(targetStream);


            var serExceptions = 0;
            var wasOk         = false;

            test.BeforeSerializationIterationBatch(serializer);
            serializer.BeforeSerializationIterationBatch(test);

            var sw = Stopwatch.StartNew();

            for (var i = 0; Running && i < test.SerIterations; i++)
            {
                targetStream.Position = 0;

                try
                {
                    test.PerformSerializationTest(serializer, streamWrap);
                    if (test.Aborted)
                    {
                        DataStore.SaveTestData(new AbortedData(serializer, test, AbortedFrom.Serialization, test.AbortMessage));

                        result.SerAborts++;
                        result.FirstSerAbortMsg = test.AbortMessage;
                        test.ResetAbort();
                        if (result.SerAborts == ABORT_CUTOFF)
                        {
                            i = test.SerIterations;
                            throw new SerbenchException("Too many aborts {0}. Iterations run interrupted".Args(result.SerAborts));
                        }

                        continue;
                    }
                    wasOk = true;
                }
                catch (Exception error)
                {
                    serExceptions++;
                    log(MessageType.Error, "Serializing '{0}'-'{1}'".Args(serializer.Name, test.Name), error.ToMessageWithType(), error);
                    if (!wasOk && serExceptions == ERROR_CUTOFF)
                    {
                        result.SerExceptions = serExceptions;
                        result.SerSupported  = false;
                        log(MessageType.Error, "Serializing '{0}'-'{1}'".Args(serializer.Name, test.Name), "Ser test aborted in ser phase. Too many consecutive exceptions");
                        return;
                    }
                }
            }

            sw.Stop();
            result.SerSupported  = wasOk;
            result.SerExceptions = serExceptions;
            result.PayloadSize   = (int)targetStream.Position;
            result.SerIterations = test.SerIterations;
            result.SerDurationMs = sw.ElapsedMilliseconds;
            if ((result.SerDurationTicks = sw.ElapsedTicks) > 0)
            {
                result.SerOpsSec = (int)(test.SerIterations / ((double)result.SerDurationTicks / (double)Stopwatch.Frequency));
            }

            if (!result.SerSupported)
            {
                throw new SerbenchException("Test run failed as serialization not supported");
            }

            if (result.SerIterations == 0)
            {
                throw new SerbenchException("Test run failed as nothing was serialized. Test must be configured with at least 1 serialization iteration to succeed");
            }

            if (result.PayloadSize == 0)
            {
                throw new SerbenchException("Test run failed as no payload generated by serialization");
            }


            var readingStreamSegment = new NFX.IO.BufferSegmentReadingStream();

            var deserExceptions = 0;

            wasOk = false;
            test.BeforeDeserializationIterationBatch(serializer);
            serializer.BeforeDeserializationIterationBatch(test);


            var doDump =
                (this.DumpPayload && !serializer.DumpPayload.HasValue && !test.DumpPayload.HasValue) ||
                (serializer.DumpPayload.HasValue && serializer.DumpPayload.Value && (!test.DumpPayload.HasValue || (test.DumpPayload.HasValue && test.DumpPayload.Value))) ||
                (test.DumpPayload.HasValue && test.DumpPayload.Value && (!serializer.DumpPayload.HasValue || (serializer.DumpPayload.HasValue && serializer.DumpPayload.Value)));

            if (doDump)
            {
                readingStreamSegment.BindBuffer(targetStream.GetBuffer(), 0, result.PayloadSize);
                DataStore.SaveTestPayloadDump(serializer, test, readingStreamSegment);
                log(MessageType.Info, "{0}->{1}".Args(serializer.Name, test.Name), "Saved test payload dump of {0} bytes".Args(result.PayloadSize));
            }

            sw.Restart();

            for (var i = 0; Running && i < test.DeserIterations; i++)
            {
                targetStream.Position = 0;
                readingStreamSegment.BindBuffer(targetStream.GetBuffer(), 0, result.PayloadSize);
                try
                {
                    test.PerformDeserializationTest(serializer, readingStreamSegment);
                    if (test.Aborted)
                    {
                        DataStore.SaveTestData(new AbortedData(serializer, test, AbortedFrom.Deserialization, test.AbortMessage));

                        result.DeserAborts++;
                        result.FirstDeserAbortMsg = test.AbortMessage;
                        test.ResetAbort();
                        if (result.DeserAborts == ABORT_CUTOFF)
                        {
                            i = test.DeserIterations;
                            throw new SerbenchException("Too many aborts {0}. Iterations run interrupted".Args(result.DeserAborts));
                        }
                        continue;
                    }
                    wasOk = true;
                }
                catch (Exception error)
                {
                    deserExceptions++;
                    log(MessageType.Error, "Deserializing '{0}'-'{1}'".Args(serializer.Name, test.Name), error.ToMessageWithType(), error);
                    if (!wasOk && deserExceptions == ERROR_CUTOFF)
                    {
                        result.DeserExceptions = deserExceptions;
                        result.DeserSupported  = false;
                        log(MessageType.Error, "Deserializing '{0}'-'{1}'".Args(serializer.Name, test.Name), "Test aborted in deser phase. Too many consecutive exceptions");
                        return;
                    }
                }
            }

            sw.Stop();
            result.DeserSupported  = wasOk;
            result.DeserIterations = test.DeserIterations;
            result.DeserDurationMs = sw.ElapsedMilliseconds;
            if ((result.DeserDurationTicks = sw.ElapsedTicks) > 0)
            {
                result.DeserOpsSec = (int)(test.DeserIterations / ((double)result.DeserDurationTicks / (double)Stopwatch.Frequency));
            }
        }
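Both throughput figures above use the same ticks-to-ops-per-second formula; a small factored-out version, shown only as an illustration (it is not part of serbench):

        // Illustrative helper: iterations divided by elapsed seconds, where elapsed
        // seconds = durationTicks / Stopwatch.Frequency. Returns 0 when no ticks
        // elapsed, matching the guard in invokeTests above.
        private static int OpsPerSecond(int iterations, long durationTicks)
        {
            if (durationTicks <= 0)
                return 0;
            return (int)(iterations / ((double)durationTicks / (double)Stopwatch.Frequency));
        }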