public void Add(PageToCrawl page)
{
    if (page == null)
    {
        throw new ArgumentNullException(nameof(page));
    }

    if (_crawledUrlRepo.AddIfNew(page.Uri))
    {
        _pagesToCrawlRepo.Add(page);
    }
}
public void Add(PageToCrawl page) { if (page == null) { throw new ArgumentNullException("page"); } if (_allowUriRecrawling || page.IsRetry) { _pagesToCrawlRepo.Add(page); } else { if (_crawledUrlRepo.AddIfNew(page.Uri)) { _pagesToCrawlRepo.Add(page); } } }
/// <summary> /// Schedules the param to be crawled /// </summary> public void Add(PageToCrawl page) { if (page == null) { throw new ArgumentNullException(nameof(page)); } if (AllowUriRecrawling || page.IsRetry) { PagesToCrawlRepository.Add(page); } else { if (CrawledUrlRepository.AddIfNew(page.Uri)) { PagesToCrawlRepository.Add(page); } } }
public void NoFalseNegativesTest()
{
    // Create the input collection
    List<Uri> inputs = GenerateRandomDataList(10000);

    // Instantiate the repository and populate it with the inputs
    using (ICrawledUrlRepository uut = GetInstance())
    {
        // If all inputs are unique, AddIfNew() should return true for each
        foreach (Uri input in inputs)
        {
            Assert.IsTrue(uut.AddIfNew(input));
        }

        // If all were added successfully, Contains() should return true for each
        foreach (Uri input in inputs)
        {
            if (!uut.Contains(input))
            {
                Assert.Fail("False negative: {0}", input);
            }
        }
    }
}
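The helpers GenerateRandomDataList and GetInstance are referenced but not shown. A plausible pair, assuming Guid-based URLs to guarantee uniqueness and a hypothetical InMemoryCrawledUrlRepository as the default implementation under test (both are illustrative assumptions, not the source's code):

private List<Uri> GenerateRandomDataList(int count)
{
    // Guid-based paths make collisions vanishingly unlikely, so each Uri is unique
    var uris = new List<Uri>(count);
    for (int i = 0; i < count; i++)
    {
        uris.Add(new Uri("http://a.com/" + Guid.NewGuid().ToString("N")));
    }
    return uris;
}

protected virtual ICrawledUrlRepository GetInstance()
{
    // Override in a subclass to run the same test suite against other implementations
    return new InMemoryCrawledUrlRepository();  // hypothetical default implementation
}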
public void AddIfNew_AddingUniqueUri_ReturnsTrue()
{
    Assert.IsTrue(_unitUnderTest.AddIfNew(_uri1));
}
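This test depends on fixture state that is not shown. One way the setup might look, assuming MSTest and the same hypothetical in-memory repository; the field names match the test above, everything else is an assumption:

private ICrawledUrlRepository _unitUnderTest;
private Uri _uri1;

[TestInitialize]
public void SetUp()
{
    // Fresh repository and sample Uri per test; both values are illustrative
    _unitUnderTest = new InMemoryCrawledUrlRepository();
    _uri1 = new Uri("http://a.com/a.html");
}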