/// <summary>
/// Indicates whether the given feature has already been run through the
/// specified container test.
/// </summary>
public bool WasAlreadyTested([NotNull] IFeature feature, [NotNull] ContainerTest containerTest)
{
    TestedRow testedRow = GetTestedRow(feature);

    // No tested-row entry means the feature was never tested at all.
    if (testedRow == null)
    {
        return false;
    }

    return testedRow.WasTestedFor(containerTest);
}
/// <summary>
/// Creates the angle test for the given factory arguments, configured to
/// report angles in degrees.
/// </summary>
protected override ITest CreateTestInstance(object[] args)
{
    ContainerTest angleTest = CreateAngleTest(args);

    angleTest.AngleUnit = AngleUnit.Degree;

    return angleTest;
}
/// <summary>
/// Returns the timing record for the given container test, creating and
/// registering a new one on first access.
/// </summary>
private ContainerTestTimes GetContainerTestTimes(
    [NotNull] ContainerTest containerTest)
{
    ContainerTestTimes times;
    if (_containerTestTimes.TryGetValue(containerTest, out times))
    {
        return times;
    }

    // Lazily create the per-test timing record.
    times = new ContainerTestTimes();
    _containerTestTimes.Add(containerTest, times);

    return times;
}
/// <summary>
/// Executes one container test against a single test row.
/// </summary>
/// <param name="testRow">The row to execute the test for.</param>
/// <param name="occurance">Occurrence index, used when the same row is passed
/// to the same test repeatedly in different roles.</param>
/// <param name="containerTest">The test to execute.</param>
/// <param name="failedTests">Receives the test when it fails with a
/// <c>TestException</c>; callers use this to skip the test for further rows.</param>
/// <param name="applicable">False when the data reference determines the test
/// is not applicable to this row.</param>
/// <returns>The error count returned by the test, or 0 when a handled
/// exception occurred.</returns>
private int TestRow([NotNull] TestRow testRow, int occurance,
                    [NotNull] ContainerTest containerTest,
                    [NotNull] ICollection<ContainerTest> failedTests,
                    out bool applicable)
{
    applicable = true;

    IDataReference dataReference = testRow.DataReference;
    try
    {
        return (dataReference.Execute(containerTest, occurance, out applicable));
    }
    catch (TestException e)
    {
        // Test-level failure: report it AND disable the test for all
        // remaining rows by adding it to failedTests.
        _msg.Error(string.Format("Container test execution failed: {0}", e.Message), e);

        failedTests.Add(containerTest);

        ReportErrorForFailedTest(containerTest, dataReference,
                                 string.Format("Test failed: {0}", e.Message));
        return (0);
    }
    catch (TestRowException e)
    {
        // Row-level failure: report it, but keep the test active for
        // subsequent rows (not added to failedTests).
        _msg.Error(string.Format("Container test execution failed: {0}", e.Message), e);

        ReportErrorForFailedTest(containerTest, dataReference,
                                 string.Format("Test failed for row: {0}", e.Message));
        return (0);
    }
    catch (Exception e)
    {
        // Unexpected failure: abort the container run by rethrowing as a
        // TestContainerException, attaching the row when the data reference
        // is a row, otherwise the extent.
        _msg.Error(string.Format("Container test execution failed: {0}", e.Message), e);

        var rowReference = dataReference as RowReference;

        if (rowReference != null)
        {
            throw new TestContainerException(containerTest, rowReference.Row, e);
        }

        throw new TestContainerException(containerTest, dataReference.Extent, e);
    }
}
/// <summary>
/// Tries to get the accumulated execution times for the given container test.
/// </summary>
/// <param name="test">The container test to look up.</param>
/// <param name="rowMilliseconds">Milliseconds spent processing rows, or 0 when
/// no timing record exists.</param>
/// <param name="tileCompletionMilliseconds">Milliseconds spent completing
/// tiles, or 0 when no timing record exists.</param>
/// <returns>True when a timing record exists for the test.</returns>
public bool TryGetContainerTestTimes([NotNull] ContainerTest test,
                                     out double rowMilliseconds,
                                     out double tileCompletionMilliseconds)
{
    ContainerTestTimes times;
    bool found = _containerTestTimes.TryGetValue(test, out times);

    rowMilliseconds = found ? GetMilliseconds(times.RowTicks) : 0;
    tileCompletionMilliseconds = found ? GetMilliseconds(times.TileCompletionTicks) : 0;

    return found;
}
/// <summary>
/// Indicates whether this row was tested for the given container test.
/// </summary>
public bool WasTestedFor([NotNull] ContainerTest containerTest)
{
    // A null reduced-test set means the row was tested for all tests.
    if (_reducedTests == null)
    {
        return true;
    }

    return _reducedTests.Contains(containerTest);
}
/// <summary>
/// Entry point. Instantiates a <c>ContainerTest</c>; the instance is not
/// used further.
/// </summary>
public static void Main(string[] args)
{
    // Fix: the original bound the instance to an unused local variable 't'
    // (unused-local warning). The instantiation is kept, the binding dropped.
    new ContainerTest();
}
/// <summary>
/// Runs the configured tests: non-container tests first, then each container
/// test against every applicable test row, while keeping the returned error
/// counts consistent with the raised error events.
/// </summary>
/// <returns>The total error count minus cancelled errors.</returns>
private int ExecuteCore()
{
    _stopExecute = false;

    // prepare
    _totalErrorCount = 0;
    _errorEventCount = 0;
    _cancelledErrorCount = 0;
    _errorAdministrator.Clear();

    IList<ITest> nonContainerTests;
    TestUtils.ClassifyTests(_tests, _allowEditing,
                            out _containerTests, out nonContainerTests);

    // execute non-container tests
    Execute(nonContainerTests);

    if (_stopExecute)
    {
        return (_totalErrorCount - _cancelledErrorCount);
    }

    // From here on the event count is kept in sync with the counts the
    // container tests return; any divergence fails below.
    _errorEventCount = _totalErrorCount;

    var failedTests = new List<ContainerTest>();

    // iterate
    foreach (TestRow testRow in GetTestRows())
    {
        // TODO when there is a selection of rows (passed using a
        // new property or method overload), ignore test rows that are not in the selection?
        // - currently all features in the selection box (as defined in the verification service)
        // are tested, and errors from features not in the selection are ignored
        ContainerTest precedingContainerTest = null;
        var occurrence = 0;
        var executedTestIndex = 0;
        int applicableTestCount = testRow.ApplicableTests.Count;

        // TODO: drop occurance, use new class Class_with_ContainerTest_and_InvolvedTableIndex instead of containerTest
        foreach (ContainerTest containerTest in testRow.ApplicableTests)
        {
            // Tests that previously failed with a TestException are skipped
            // for all further rows.
            if (failedTests.Contains(containerTest))
            {
                continue;
            }

            using (UseProgressWatch(Step.RowProcessing, Step.RowProcessed,
                                    executedTestIndex, applicableTestCount,
                                    containerTest))
            {
                if (precedingContainerTest != null &&
                    precedingContainerTest == containerTest)
                {
                    // row will be passed to same test again, in a different role
                    occurrence++;
                }
                else
                {
                    occurrence = 0;
                    precedingContainerTest = containerTest;
                }

                bool rowApplicable;
                int origErrorEventCount = _errorEventCount;
                int testErrorCount = TestRow(testRow, occurrence, containerTest,
                                             failedTests, out rowApplicable);

                // NOTE: this 'continue' also skips the executedTestIndex++
                // below, so non-applicable tests do not advance the progress
                // index.
                if (!rowApplicable)
                {
                    continue;
                }

                int testErrorEventCount = _errorEventCount - origErrorEventCount;

                _totalErrorCount += testErrorCount;

                // NOTE(review): the guard compares the running totals, while
                // the failure message reports the per-test counts
                // (testErrorCount / testErrorEventCount) — confirm intended.
                if (_totalErrorCount != _errorEventCount)
                {
                    Assert.Fail(
                        "Test '{0}' has inconsistent error count for row {1}: " +
                        "returned count is {2:N0}, raised errors count is {3:N0}",
                        containerTest.GetType(),
                        testRow.DataReference.GetLongDescription(),
                        testErrorCount, testErrorEventCount);
                }

                if (_stopExecute)
                {
                    return (_totalErrorCount - _cancelledErrorCount);
                }
            }

            executedTestIndex++;
        }
    }

    // Final consistency check over the whole run.
    Assert.AreEqual(_totalErrorCount, _errorEventCount,
                    "more errors reported ({0:N0}) than thrown ({1:N0})",
                    _totalErrorCount, _errorEventCount);

    OnProgressChanged(Step.Completed, 0, 1,
                      string.Format("{0:N0} errors found",
                                    _totalErrorCount - _cancelledErrorCount));

    return (_totalErrorCount - _cancelledErrorCount);
}
/// <summary>
/// Detaches this container's handlers from the test's events.
/// </summary>
void ITestContainer.UnsubscribeTestEvents(ContainerTest containerTest)
{
    containerTest.QaError -= test_QaError;
    containerTest.TestingRow -= test_TestingRow;
}
/// <summary>
/// Attaches this container's handlers to the test's events.
/// </summary>
void ITestContainer.SubscribeTestEvents(ContainerTest containerTest)
{
    containerTest.TestingRow += test_TestingRow;
    containerTest.QaError += test_QaError;
}