public async Task GetTest()
{
    // Arrange
    const int reqTimeout = 20;
    const string reqDomain = "https://sub.domain.com";
    var reqUrl = $"{reqDomain}/firstpage";
    var expectedPages = new[]
    {
        new PageDto($"{reqUrl}/about", new ConcurrentDictionary<string, short>()),
        new PageDto(reqUrl, new ConcurrentDictionary<string, short>())
    };
    spiderManagerMock
        .Setup(spider => spider.CreateSiteMap(reqUrl, It.IsAny<CancellationToken>()))
        .ReturnsAsync(expectedPages);
    var controller = new CrawlController(new NullLogger<CrawlController>(), spiderManagerMock.Object);

    // Act
    var result = (await controller.Post(reqUrl, reqTimeout)).Result as OkObjectResult;

    // Assert
    Assert.IsNotNull(result);
    var dto = result.Value as SiteMap;
    Assert.AreEqual(reqUrl, dto.StartUrl);
    Assert.AreEqual(reqDomain, dto.RequestedDomain);
    Assert.IsTrue(dto.Completed);
    Assert.AreEqual(expectedPages, dto.Pages);
}
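For the cast `(await controller.Post(...)).Result as OkObjectResult` to compile, the action must return `ActionResult<SiteMap>`, and the assertions imply particular shapes for the DTOs and the mocked dependency. A minimal sketch of those shapes, inferred purely from the test above (the interface name `ISpiderManager`, the record layout, and the return type of `CreateSiteMap` are assumptions, not taken from the original project):

// Hypothetical type shapes inferred from the assertions; the real definitions may differ.
public interface ISpiderManager
{
    Task<IReadOnlyCollection<PageDto>> CreateSiteMap(string url, CancellationToken cancellationToken);
}

public record PageDto(string Url, ConcurrentDictionary<string, short> WordCounts);

public record SiteMap(string StartUrl, string RequestedDomain, bool Completed, IReadOnlyCollection<PageDto> Pages);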
public async Task GetTestBadRequest(string reqUrl)
{
    // Arrange
    const int reqTimeout = 20;
    var controller = new CrawlController(new NullLogger<CrawlController>(), spiderManagerMock.Object);

    // Act
    var result = await controller.Post(reqUrl, reqTimeout);

    // Assert
    Assert.That(result.Result is BadRequestResult);
}
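Both tests reference a `spiderManagerMock` field that is declared outside the excerpt. A minimal NUnit fixture that could host them might look like the sketch below; the fixture name, the `[TestCase]` inputs, and the per-test mock reset are assumptions (using Moq and NUnit, as the `Mock<T>` and `Assert` calls suggest):

// Sketch of the surrounding fixture; attribute values and names are illustrative only.
[TestFixture]
public class CrawlControllerTests
{
    private Mock<ISpiderManager> spiderManagerMock;

    [SetUp]
    public void SetUp()
    {
        // Fresh mock per test so setups and verifications cannot leak between tests.
        spiderManagerMock = new Mock<ISpiderManager>();
    }

    [Test]
    public async Task GetTest() { /* as shown above */ }

    [TestCase(null)]
    [TestCase("")]
    [TestCase("not-a-url")]
    public async Task GetTestBadRequest(string reqUrl) { /* as shown above */ }
}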
public void GivenPostIsCalledThenCollectAndSaveAreCalled()
{
    // Arrange
    var url = "https://www.bbc.co.uk/news/uk-politics-45621354";
    var collectMock = new Mock<ICollector>();
    collectMock.Setup(c => c.Collect(url)).Returns(new List<WordMetric>());
    var repositoryMock = new Mock<IWordRepository>();
    var sut = new CrawlController(collectMock.Object, repositoryMock.Object);

    // Act
    sut.Post(new FormData { url = url });

    // Assert
    collectMock.Verify(c => c.Collect(url), Times.Once);
    repositoryMock.Verify(r => r.Save(It.IsAny<List<WordMetric>>()), Times.Once);
}
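Note that this test exercises a different `CrawlController` from the first two: it is constructed from an `ICollector` and an `IWordRepository` and accepts a `FormData` payload. A sketch of the collaborator shapes the test implies (member lists are assumptions; only the members the test actually touches are shown):

// Hypothetical collaborator shapes implied by the test; actual definitions may differ.
public class FormData
{
    public string url { get; set; }
}

public class WordMetric
{
    // Word/count fields omitted; the test only needs the type to exist.
}

public interface ICollector
{
    List<WordMetric> Collect(string url);
}

public interface IWordRepository
{
    void Save(List<WordMetric> metrics);
}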