public void Should_build_the_command_line_for_each_run()
{
    // Runner executables resolve per framework and are reported as present on disk.
    _configuration.Stub(x => x.MSpecTestRunner("framework 1")).Return("c:\\runner 1.exe");
    _configuration.Stub(x => x.MSpecTestRunner("framework 2")).Return("c:\\runner 2.exe");
    _fileSystem.Stub(x => x.FileExists(null)).IgnoreArguments().Return(true);

    var firstDocument = new ProjectDocument(ProjectType.CSharp);
    firstDocument.SetFramework("framework 1");
    var secondDocument = new ProjectDocument(ProjectType.CSharp);
    secondDocument.SetFramework("framework 2");

    var runs = new[]
    {
        new TestRunInfo(new Project("key 1", firstDocument), "assembly 1"),
        new TestRunInfo(new Project("key 2", secondDocument), "assembly 2")
    };

    _runner.RunTests(runs, null, null);

    // One command line must be built per framework-specific run.
    _commandLineBuilder.AssertWasCalled(x => x.Build(null), o => o.IgnoreArguments().Repeat.Twice());
}
// Verifies that arguments routed through a nested (double) sub-command bind
// onto the expected TestRunInfo properties when mixed with parent-command
// positional args, short/long options, and a double-list.
public void DoubleSubCommand_BindsCorrectly()
{
    RunInfoBuilder builder = GetBuilder();
    var args = new string[] {
        "ComplexCommand", "one", "33", "5", "6",
        "-s", "short_string", "--int=100", "--bool1",
        "DoubleSubCommand", "3.7", "10.1", "10.2", "10.3", "9", "8", "10.5"
    };
    TestRunInfo result = (TestRunInfo)builder.Build(args);
    //Assert.Equal(2, result.Int1);
    Assert.Equal(33, result.Int2);
    Assert.Equal(11, result.Int3);
    //Assert.Equal("short_string", result.String1);
    Assert.Equal(100, result.Int1); // --int=100 should override the first arg "one"
    Assert.True(result.Bool1);
    Assert.Equal(3.7, result.Double1);
    // The list option captures exactly the three doubles following 3.7.
    Assert.Equal(3, result.DoubleList1.Count);
    Assert.Equal(10.1, result.DoubleList1[0]);
    Assert.Equal(10.2, result.DoubleList1[1]);
    Assert.Equal(10.3, result.DoubleList1[2]);
}
// When the pre-processed run info requests "rerun all tests when finished",
// consuming a project change must invoke the test runner twice: once for the
// initial run and once for the requested rerun.
public void Should_rerun_test_if_pre_processor_says_so()
{
    _runInfo.ShouldNotBuild();
    _project.Value.SetOutputPath("");
    _project.Value.SetAssemblyName("someProject.dll");
    var info = new TestRunInfo(_project, "");
    _listGenerator.Stub(l => l.Generate(null)).IgnoreArguments().Return(new string[] { "some file.csproj" });
    // Invalid build executable keeps the consumer from actually running a build.
    _configuration.Stub(c => c.BuildExecutable(_project.Value)).Return("invalid_to_not_run_builds.exe");
    var result = new TestRunResults[] { new TestRunResults("", "", false, TestRunner.NUnit, new TestResult[] { }) };
    _testRunner.Stub(t => t.CanHandleTestFor(info.Assembly)).IgnoreArguments().Return(true);
    _testRunner.Stub(t => t.RunTests(new TestRunInfo[] { info }, null, null)).IgnoreArguments()
        .Return(result);
    // Request a full rerun for any runner once the first run has finished.
    _runInfo.ShouldRerunAllTestWhenFinishedFor(TestRunner.Any);
    _removedTestLocator.Stub(r => r.SetRemovedTestsAsPassed(null, null)).IgnoreArguments().Return(result[0]);
    _removedTestLocator.Stub(r => r.RemoveUnmatchedRunInfoTests(null, null)).IgnoreArguments().Return(new List<TestRunResults>());
    _testAssemblyValidator.Stub(t => t.ShouldNotTestAssembly("")).IgnoreArguments().Return(false);

    var message = new ProjectChangeMessage();
    message.AddFile(new ChangedFile("some file.csproj"));
    _consumer.Consume(message);

    // Two RunTests invocations prove the rerun actually happened.
    _testRunner.AssertWasCalled(t => t.RunTests(new TestRunInfo[] { new TestRunInfo(null, "") }, null, null), t => t.IgnoreArguments().Repeat.Twice());
}
public void Should_locate_removed_test_in_partial_test_run()
{
    // Seed the cache with four non-passing results for the assembly.
    var cache = new RunResultCache();
    cache.Merge(new TestRunResults("project1", "assembly", false, TestRunner.NUnit, new TestResult[]
    {
        new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test1"),
        new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test2"),
        new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test3"),
        new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test4")
    }));

    // The partial run only targeted Test1..Test3.
    var runInfo = new TestRunInfo(new Project("project", new ProjectDocument(ProjectType.CSharp)), "assembly");
    runInfo.AddTestsToRun(new TestToRun[]
    {
        new TestToRun(TestRunner.NUnit, "Test1"),
        new TestToRun(TestRunner.NUnit, "Test2"),
        new TestToRun(TestRunner.NUnit, "Test3")
    });
    var partialResults = new TestRunResults("project1", "assembly", true, TestRunner.NUnit, new TestResult[]
    {
        new TestResult(TestRunner.NUnit, TestRunStatus.Ignored, "Test1")
    });

    var locator = new RemovedTestsLocator(cache);
    var updated = locator.SetRemovedTestsAsPassed(partialResults, new TestRunInfo[] { runInfo });

    // Test2 and Test3 were part of the run but no longer reported => marked passed.
    updated.Passed.Length.ShouldEqual(2);
    updated.Passed[0].Name.ShouldEqual("Test3");
    updated.Passed[1].Name.ShouldEqual("Test2");
}
public void Should_check_the_runner_exe_for_each_framework()
{
    // Each framework maps to its own runner executable path.
    _configuration.Stub(x => x.MSpecTestRunner("framework 1")).Return("c:\\runner 1.exe");
    _configuration.Stub(x => x.MSpecTestRunner("framework 2")).Return("c:\\runner 2.exe");

    var firstDocument = new ProjectDocument(ProjectType.CSharp);
    firstDocument.SetFramework("framework 1");
    var secondDocument = new ProjectDocument(ProjectType.CSharp);
    secondDocument.SetFramework("framework 2");

    var runs = new[]
    {
        new TestRunInfo(new Project("key 1", firstDocument), "assembly 1"),
        new TestRunInfo(new Project("key 2", secondDocument), "assembly 2")
    };

    _runner.RunTests(runs, null, null);

    // The resolved runner executable must be probed on disk for both frameworks.
    _fileSystem.AssertWasCalled(x => x.FileExists("c:\\runner 1.exe"));
    _fileSystem.AssertWasCalled(x => x.FileExists("c:\\runner 2.exe"));
}
public void Should_not_remove_tests_from_different_runners()
{
    var cache = new RunResultCache();
    cache.EnabledDeltas();

    // An NUnit failure enters the cache first.
    cache.Merge(new TestRunResults("project", "assembly", false, TestRunner.NUnit, new TestResult[]
    {
        new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test1"),
    }));
    var firstDelta = cache.PopDeltas();
    firstDelta.AddedTests.Length.ShouldEqual(1);

    var infos = new TestRunInfo[]
    {
        new TestRunInfo(new Project("project", new ProjectDocument(ProjectType.CSharp)), "assembly")
    };

    // A later XUnit run reports the same test name under a different runner.
    var xunitResults = new TestRunResults("project", "assembly", false, TestRunner.XUnit, new TestResult[]
    {
        new TestResult(TestRunner.XUnit, TestRunStatus.Failed, "Test1"),
    });
    var locator = new RemovedTestsLocator(cache);
    xunitResults = locator.SetRemovedTestsAsPassed(xunitResults, infos);
    cache.Merge(xunitResults);

    // The cached NUnit result must not be treated as removed by the XUnit run.
    xunitResults.Passed.Length.ShouldEqual(0);
    var secondDelta = cache.PopDeltas();
    secondDelta.RemovedTests.Length.ShouldEqual(0);
}
// Gradually steps request throughput up to the configured target RPS before the
// main measurement run. Applies only when more than 4 seconds of ramp-up is
// configured and termination has not been requested.
private async Task RampUpAsync(TestRunInfo testRunInfo)
{
    // Handle ramp up if defined
    if (testRunInfo.RampUpTimeSeconds > 4 && !_terminate)
    {
        Tracer.TraceInfo($"Ramping up starts.");
        DateTime startTime = DateTime.Now;
        DateTime endTime = startTime + TimeSpan.FromSeconds(testRunInfo.RampUpTimeSeconds);
        // At most 6 intervals, one per 5 seconds of configured ramp-up time.
        int numberIntervals = Math.Min(testRunInfo.RampUpTimeSeconds / 5, 6);
        TimeSpan intervalLength = (endTime - startTime) / numberIntervals;
        // Linear RPS increase per interval so the last interval reaches TargetRPS.
        double intervalRpsDelta = ((double)testRunInfo.TargetRPS) / ((double)numberIntervals);
        for (int i = 0; i < numberIntervals && !_terminate; i++)
        {
            // Rotate through the API mix so successive intervals exercise different APIs.
            var apiInfo = _mixInfo.ApiMix[i % _mixInfo.ApiMix.Count];
            long intervalRps = (long)Math.Round((i + 1) * intervalRpsDelta);
            Tracer.TraceInfo($"Ramping up. RPS = {intervalRps}");
            AsyncFor myRampUpFor = new AsyncFor(intervalRps, GetResourceDescription(apiInfo, _mixInfo), GetTestDescription(apiInfo), testRunInfo.MeasureServerSideTime);
            myRampUpFor.PerSecondMetricsAvailable += new ConsoleMetricsHandler().MetricsAvailableHandler;
            // Track the instance so it can be shut down on termination.
            _asyncForInstances.Add(myRampUpFor);
            await myRampUpFor.For(intervalLength, testRunInfo.SimultaneousConnections, new MaaServiceApiCaller(apiInfo, _mixInfo.ProviderMix, testRunInfo.EnclaveInfoFile, testRunInfo.ForceReconnects).CallApi);
        }
        Tracer.TraceInfo($"Ramping up complete.");
    }
}
// Populates this record's verdict fields from a submission and its per-test log:
// acceptance flag, judged flag, peak memory/time across tests, and the 1-based
// index of the first failing test (if any).
public void FillFromSubmission(Submission submit)
{
    submit.LoadLog();
    isAccepted = submit.Outcome == ICPCOutcomeMapper.Accepted;
    // Judged = a judgeable outcome that has already been judged.
    wasJudged = !OutcomeMapper.IsNotJudgeableOutcome(submit.Outcome) && !OutcomeMapper.IsNotJudgedYetOutcome(submit.Outcome);
    if (!OutcomeMapper.IsNotJudgeableOutcome(submit.Outcome))
    {
        List <TestRunInfo> tests = submit.Log.TestCollection;
        for (int i = 1; i <= tests.Count; ++i)
        {
            TestRunInfo t = tests[i - 1];
            maxMemory = Math.Max(maxMemory, t.RunResult.MemoryUsed);
            // NOTE(review): reads the MaxTime property here but the maxMemory
            // field on the line above — presumably MaxTime just wraps maxTime;
            // confirm, otherwise this should read `maxTime` for consistency.
            maxTime = Math.Max(MaxTime, t.RunResult.TimeWorked);
            if (t.CheckStatus != CheckStatus.Ok)
            {
                // Remember the first non-OK test (1-based) and stop scanning.
                stopTest = i;
                break;
            }
        }
    }
}
// Framework of the run's project, or the empty string when no project is attached.
private string getFramework(TestRunInfo runInfo)
{
    return runInfo.Project == null
        ? ""
        : runInfo.Project.Value.Framework;
}
// Packages the accumulated results into a TestRunResults for this run,
// using the project key when a project is attached (empty otherwise).
private TestRunResults getTestResults(TestRunInfo runInfo)
{
    var project = runInfo.Project != null ? runInfo.Project.Key : "";
    return new TestRunResults(project, runInfo.Assembly, _isPartialTestRuns, _runner, _result.ToArray());
}
// Builds the MSTest command-line test selection: one "/test:<name> " token
// per test registered for the MSTest runner (trailing space preserved).
private string getTestsList(TestRunInfo runInfo)
{
    var arguments = "";
    foreach (var test in runInfo.GetTestsFor(TestRunner.MSTest))
    {
        arguments = arguments + $"/test:{test} ";
    }
    return arguments;
}
// Builds a comma-separated list of the tests registered for the NUnit runner.
private string getTestsList(TestRunInfo runInfo)
{
    var list = "";
    foreach (var test in runInfo.GetTestsFor(TestRunner.NUnit))
    {
        if (list.Length > 0)
        {
            list += ",";
        }
        list += test;
    }
    return list;
}
// Projects a TestRun entity into its TestRunInfo view, attaching the
// result infos looked up by run id.
TestRunInfo ToInfo(TestRun run)
{
    return new TestRunInfo
    {
        Id = run.Id,
        Name = run.Name,
        When = run.When,
        Description = "Test run on " + run.When,
        Results = GetResultInfos(run.Id)
    };
}
// Parses a single-assembly NUnit report so the tests below can inspect the result.
public void SetUp()
{
    var messageBus = MockRepository.GenerateMock<IMessageBus>();
    _parser = new NUnitTestResponseParser(messageBus, TestRunner.NUnit);
    var runInfos = new TestRunInfo[]
    {
        new TestRunInfo(new Project("project1", null), "/SomePath/AutoTest.WinForms.Test/bin/Debug/AutoTest.WinForms.Test.dll")
    };
    _parser.Parse(File.ReadAllText("TestResources/NUnit/singleAssembly.txt"), runInfos, true);
}
// Returns a copy of the given run info whose per-test artifacts have been
// rewritten through ProcessArtifactInfo (keyed by run id, test key, and
// artifact index). The input instance is not modified.
// Cleanups vs. original: removed a stray double semicolon and replaced the
// verbose anonymous-delegate element selector with a lambda.
static TestRunInfo ProcessTestRunInfo(TestRunInfo tri)
{
    var processed = new TestRunInfo(tri);
    processed.Results = tri.Results.ToDictionary(
        entry => entry.Key,
        entry =>
        {
            // Copy the result, then substitute each artifact with its processed form.
            var replacement = new TestResultInfo(entry.Value);
            replacement.Artifacts = entry.Value.Artifacts
                .Select((artifact, index) => ProcessArtifactInfo(tri.Id, entry.Key, index, artifact))
                .ToArray();
            return replacement;
        });
    return processed;
}
// Feeds the parser a report known to be hard to parse, with all line
// breaks stripped so it arrives as one continuous string.
public void SetUp()
{
    var messageBus = MockRepository.GenerateMock<IMessageBus>();
    _parser = new NUnitTestResponseParser(messageBus, TestRunner.NUnit);
    var runInfos = new TestRunInfo[]
    {
        new TestRunInfo(new Project("project1", null), @"C:\Users\ack\src\SomeProject\SomeFile.dll")
    };
    var reportText = File.ReadAllText("TestResources/NUnit/FailsToParse.txt")
        .Replace("\r\n", "")
        .Replace("\n", "");
    _parser.Parse(reportText, runInfos, true);
}
// Parses a multi-assembly NUnit report covering three projects.
public void SetUp()
{
    var messageBus = MockRepository.GenerateMock<IMessageBus>();
    _parser = new NUnitTestResponseParser(messageBus, TestRunner.NUnit);
    var runInfos = new TestRunInfo[]
    {
        new TestRunInfo(new Project("project1", null), "/home/ack/src/AutoTest.Net/src/AutoTest.TestCore/bin/Debug/AutoTest.TestCore.dll"),
        new TestRunInfo(new Project("project2", null), "/home/ack/src/AutoTest.Net/src/AutoTest.Test/bin/Debug/AutoTest.Test.dll"),
        new TestRunInfo(new Project("project3", null), "/home/ack/src/AutoTest.Net/src/AutoTest.WinForms.Test/bin/Debug/AutoTest.WinForms.Test.dll")
    };
    _parser.Parse(File.ReadAllText("TestResources/NUnit/NewOutput.txt"), runInfos, false);
}
// Maps a single test's check/run status onto a generic verdict.
// Check-status mappings take precedence over run-status mappings;
// anything unmapped falls through to Ok.
// Fix vs. original: single TryGetValue lookup instead of the
// ContainsKey + indexer double lookup on each dictionary.
public GenericVerdict Judge(TestRunInfo checkResult)
{
    GenericVerdict verdict;
    if (checkStatusMap.TryGetValue(checkResult.CheckResult.CheckStatus, out verdict))
    {
        return verdict;
    }
    if (runStatusMap.TryGetValue(checkResult.RunResult.Status, out verdict))
    {
        return verdict;
    }
    return GenericVerdict.Ok;
}
// Feeds the parser an xUnit-flavoured report with line breaks stripped.
// Fix vs. original: the assembly path was built with string.Format whose
// format string contained no placeholders, so the Path.DirectorySeparatorChar
// argument was silently ignored — the dead call is removed and the literal
// path (byte-identical result) is used directly.
public void SetUp()
{
    var bus = MockRepository.GenerateMock<IMessageBus>();
    _parser = new NUnitTestResponseParser(bus, TestRunner.XUnit);
    var sources = new TestRunInfo[]
    {
        new TestRunInfo(new Project("project1", null), "/home/ack/backup/WorkWin7/src/DotNET/Temp/SomeProjectUsingXUnit/bin/Debug/SomeProjectUsingXUnit.dll")
    };
    var text = File.ReadAllText("TestResources/NUnit/XUnitOutput.txt");
    // Strip all line breaks so the parser sees one continuous string.
    text = text.Replace("\r\n", "").Replace("\n", "");
    _parser.Parse(text, sources, false);
}
public void Should_not_create_a_filter_file_when_all_tests_are_run()
{
    var projectDocument = new ProjectDocument(ProjectType.CSharp);
    projectDocument.SetFramework("framework 1");
    var runInfos = new[] { new TestRunInfo(new Project("key 1", projectDocument), "assembly 1") };
    var run = new MSpecTestRunner.Run { RunInfos = runInfos };

    var commandLine = _builder.Build(run);

    // Running everything means no --filter argument should be generated.
    Assert.That(commandLine, Is.Not.StringContaining("--filter"));
}
public void Should_report_the_time_info()
{
    var projectDocument = new ProjectDocument(ProjectType.CSharp);
    projectDocument.SetFramework("framework 1");
    var runInfos = new[] { new TestRunInfo(new Project("key 1", projectDocument), "assembly 1") };
    var run = new MSpecTestRunner.Run { RunInfos = runInfos };

    var commandLine = _builder.Build(run);

    // The built command line always requests timing information.
    Assert.That(commandLine, Is.StringContaining("--timeinfo"));
}
// Returns display info for a submission: [0] first failing test number,
// [1] max time in seconds, [2] max memory in KB. Entries stay empty when
// the value is unavailable or the submission has no per-test log yet.
public string[] GetInfo(Submission s)
{
    string[] ans = new string[] { "", "", "" };
    // Skip outcomes that carry no meaningful per-test log.
    if (s.Outcome != OutcomeManager.CompilationError && s.Outcome != OutcomeManager.Compiling &&
        s.Outcome != OutcomeManager.Waiting && s.Outcome != OutcomeManager.Running &&
        s.Outcome != OutcomeManager.CannotJudge && s.Outcome != OutcomeManager.TestingFailure)
    {
        double maxtime = -1, maxmem = -1;
        int stoptest = -1;
        s.LoadLog();
        for (int i = 0; i < s.Log.TestCollection.Count; i++)
        {
            TestRunInfo t = s.Log.TestCollection[i];
            if (t.RunResult.MemoryUsed > maxmem)
            {
                maxmem = t.RunResult.MemoryUsed;
            }
            if (t.RunResult.TimeWorked > maxtime)
            {
                maxtime = t.RunResult.TimeWorked;
            }
            if (t.CheckStatus != CheckStatus.Ok)
            {
                // First failing test, reported 1-based; stop scanning further tests.
                stoptest = i + 1;
                break;
            }
        }
        if (stoptest > 0)
        {
            ans[0] = stoptest.ToString();
        }
        if (maxtime > 0)
        {
            // Divides by 1000, so TimeWorked is presumably milliseconds — TODO confirm.
            // "сек" is Russian for seconds (user-facing, must stay as-is).
            ans[1] = Math.Round(maxtime / 1000, 4) + " сек";
        }
        if (maxmem > 0)
        {
            // Divides by 1024, so MemoryUsed is presumably bytes — TODO confirm.
            // "КБ" is Russian for KB (user-facing, must stay as-is).
            ans[2] = maxmem / 1024 + " КБ";
        }
    }
    return ans;
}
// Gradually steps request throughput up to the configured target RPS before the
// main measurement run. Applies only when more than 4 seconds of ramp-up is
// configured and cancellation has not been requested; honors cancellation
// between intervals and during an in-flight interval.
private async Task RampUpAsync(TestRunInfo testRunInfo)
{
    // Handle ramp up if defined
    if (testRunInfo.RampUpTimeSeconds > 4 && !_cancellationTokenSource.IsCancellationRequested)
    {
        Tracer.TraceInfo($"Ramping up starts.");
        DateTime startTime = DateTime.Now;
        DateTime endTime = startTime + TimeSpan.FromSeconds(testRunInfo.RampUpTimeSeconds);
        // At most 6 intervals, one per 5 seconds of configured ramp-up time.
        int numberIntervals = Math.Min(testRunInfo.RampUpTimeSeconds / 5, 6);
        TimeSpan intervalLength = (endTime - startTime) / numberIntervals;
        // Linear RPS increase per interval so the last interval reaches TargetRPS.
        double intervalRpsDelta = ((double)testRunInfo.TargetRPS) / ((double)numberIntervals);
        for (int i = 0; i < numberIntervals && !_cancellationTokenSource.IsCancellationRequested; i++)
        {
            // Rotate through the API mix so successive intervals exercise different APIs.
            var apiInfo = _mixInfo.ApiMix[i % _mixInfo.ApiMix.Count];
            long intervalRps = (long)Math.Round((i + 1) * intervalRpsDelta);
            Tracer.TraceInfo($"Ramping up. RPS = {intervalRps}");
            AsyncFor myRampUpFor = new AsyncFor(intervalRps, GetResourceDescription(apiInfo, _mixInfo), GetTestDescription(apiInfo), testRunInfo.MeasureServerSideTime);
            myRampUpFor.PerSecondMetricsAvailable += new ConsoleMetricsHandler().MetricsAvailableHandler;
            // Track the instance so it can be shut down on cancellation.
            _asyncForInstances.Add(myRampUpFor);
            try
            {
                await myRampUpFor.ForAsync(
                    intervalLength,
                    testRunInfo.SimultaneousConnections,
                    new MaaServiceApiCaller(apiInfo, _mixInfo.ProviderMix, testRunInfo.EnclaveInfoFile, testRunInfo.ForceReconnects).CallApi,
                    _cancellationTokenSource.Token);
            }
            catch (TaskCanceledException)
            {
                // Ignore task cancelled if we requested cancellation via ctrl-c
                if (_cancellationTokenSource.IsCancellationRequested)
                {
                    Tracer.TraceInfo(($"Organized shutdown in progress. All asyncfor instances have gracefully shutdown."));
                }
                else
                {
                    // Cancellation we did not request is a real failure: propagate.
                    throw;
                }
            }
        }
        Tracer.TraceInfo($"Ramping up complete.");
    }
}
public void Should_create_an_xml_report()
{
    var projectDocument = new ProjectDocument(ProjectType.CSharp);
    projectDocument.SetFramework("framework 1");
    var runInfos = new[] { new TestRunInfo(new Project("key 1", projectDocument), "assembly 1") };
    var run = new MSpecTestRunner.Run { RunInfos = runInfos };

    var commandLine = _builder.Build(run);

    Assert.That(commandLine, Is.StringContaining("--xml"));
    // The XML report registers exactly one cleanup step and one result harvester.
    Assert.That(run.Cleanups.Count(), Is.EqualTo(1));
    Assert.That(run.Harvesters.Count(), Is.EqualTo(1));
}
// Runs each configured test unless compilation failed; collects per-test
// results and stops at the first breaking result unless the submission
// requests that all tests run.
public TestLog Test()
{
    if (!compilationSucceeded)
    {
        return result;
    }

    result.CheckResults = new List<TestRunInfo>();
    foreach (var test in testInfo.Tests)
    {
        var runInfo = RunOneTest(test);
        result.CheckResults.Add(runInfo);
        if (!submissionInfo.RunAllTests && ShouldBreak(runInfo))
        {
            break;
        }
    }
    return result;
}
public void Should_create_the_assembly_list()
{
    var projectDocument = new ProjectDocument(ProjectType.CSharp);
    projectDocument.SetFramework("framework 1");
    var runInfos = new[]
    {
        new TestRunInfo(new Project("key 1", projectDocument), "assembly 1"),
        new TestRunInfo(new Project("key 2", projectDocument), "assembly 2")
    };
    var run = new MSpecTestRunner.Run { RunInfos = runInfos };

    var commandLine = _builder.Build(run);

    // Both assemblies appear quoted on the command line.
    Assert.That(commandLine, Is.StringContaining(" \"assembly 1\""));
    Assert.That(commandLine, Is.StringContaining(" \"assembly 2\""));
}
/// <summary>
/// Returns a summary item for a given tcm data object for a test run,
/// labelling it with the run name when available and falling back to the run id.
/// </summary>
private TestSummaryItem GetTestRunSummaryInfo(TestResultsDetailsForGroup resultsForGroup)
{
    _logger.LogInformation($"Getting Test summary data for test run - {resultsForGroup.GroupByValue}");
    TestRunInfo runInfo = ReadGroupByValue(resultsForGroup);

    var displayName = string.IsNullOrWhiteSpace(runInfo.Name) ? runInfo.Id.ToString() : runInfo.Name;
    var summaryItem = new TestSummaryItem
    {
        Name = displayName,
        Id = runInfo.Id.ToString()
    };
    ParseBaseData(resultsForGroup, summaryItem);
    return summaryItem;
}
public void Should_create_the_assembly_list_from_distinct_assembly_names()
{
    var projectDocument = new ProjectDocument(ProjectType.CSharp);
    projectDocument.SetFramework("framework 1");
    var runInfos = new[]
    {
        new TestRunInfo(new Project("key 1", projectDocument), "assembly 1"),
        new TestRunInfo(new Project("key 2", projectDocument), "assembly 1")
    };
    var run = new MSpecTestRunner.Run { RunInfos = runInfos };

    var commandLine = _builder.Build(run);

    // Two run infos sharing an assembly must yield a single occurrence.
    var occurrences = new Regex("assembly 1").Matches(commandLine).Count;
    Assert.That(occurrences, Is.EqualTo(1));
}
public void Should_remove_tests_for_run_infos_having_run_all_tests()
{
    // Cache holds a failing test for the assembly.
    var cache = new RunResultCache();
    cache.Merge(new TestRunResults("project1", "assembly", false, TestRunner.NUnit, new TestResult[]
    {
        new TestResult(TestRunner.NUnit, TestRunStatus.Failed, "Test1")
    }));

    // A run info with no explicit test list covers the whole assembly.
    var runInfo = new TestRunInfo(new Project("project1", new ProjectDocument(ProjectType.CSharp)), "assembly");
    var locator = new RemovedTestsLocator(cache);

    var output = locator.RemoveUnmatchedRunInfoTests(new TestRunResults[] { }, new TestRunInfo[] { runInfo });

    // The cached test was not re-reported, so it comes back as passed.
    output.Count.ShouldEqual(1);
    output[0].Passed.Length.ShouldEqual(1);
    output[0].Passed[0].Name.ShouldEqual("Test1");
}
// A project change must be routed through the pre-processor before any
// build/test activity: verifies the consumer forwards run information
// to PreProcess.
public void Should_pre_process_run_information()
{
    _project.Value.SetOutputPath("");
    _project.Value.SetAssemblyName("someProject.dll");
    var info = new TestRunInfo(_project, "someProject.dll");
    // The change list resolves to a single project file.
    _listGenerator.Stub(l => l.Generate(null)).IgnoreArguments().Return(new string[] { "some file.csproj" });
    // Invalid build executable keeps the consumer from actually running a build.
    _configuration.Stub(c => c.BuildExecutable(_project.Value)).Return("invalid_to_not_run_builds.exe");
    _testRunner.Stub(t => t.CanHandleTestFor(info.Assembly)).Return(true);
    _testRunner.Stub(t => t.RunTests(new TestRunInfo[] { info }, null, null)).IgnoreArguments()
        .Return(new TestRunResults[] { new TestRunResults("", "", false, TestRunner.NUnit, new TestResult[] { }) });
    // Validator rejects the assembly so no tests actually execute.
    _testAssemblyValidator.Stub(t => t.ShouldNotTestAssembly("")).IgnoreArguments().Return(true);

    var message = new ProjectChangeMessage();
    message.AddFile(new ChangedFile("some file.csproj"));
    _consumer.Consume(message);

    _preProcessor.AssertWasCalled(p => p.PreProcess(null), p => p.IgnoreArguments());
}