/// <summary>
/// A non-OK HTTP status must reject the page even when the repository says the
/// URL is neither blacklisted nor already processed.
/// </summary>
public void IsPageToBeProcessed_Returns_False_If_HttpStatusCode_Is_Not_Ok()
{
    // Arrange: stub both repo checks to the "would process" answers so the
    // only reason to refuse the page is the status code itself.
    var logicProvider = new Mock<ILogicProvider>();
    var repository = new Mock<IRepository>();
    var targetUri = new Uri("http://www.x.com");
    var statusCode = HttpStatusCode.PartialContent;

    repository.Setup(r => r.IsBlackListed(It.IsAny<string>())).Returns(false);
    repository.Setup(r => r.IsPageProcessed(It.IsAny<string>())).Returns(false);

    // Act
    var crawler = new CrawlDaddy(logicProvider.Object, repository.Object);
    var shouldProcess = crawler.IsPageToBeProcessed(targetUri, statusCode);

    // Assert
    Assert.False(shouldProcess);
}
/// <summary>
/// A page with an OK status that is neither blacklisted nor already processed
/// should be accepted for processing.
/// </summary>
public void IsPageToBeProcessed_Returns_True_If_Status_Is_Ok_And_Url_Is_Not_Blacklisted_Or_Processed()
{
    // Arrange
    var mockProvider = new Mock<ILogicProvider>();
    var mockRepo = new Mock<IRepository>();
    var uri = new Uri("http://www.x.com");
    var code = HttpStatusCode.OK;

    // FIX: the original stubbed IsPageProcessed("blah"), which can never match
    // the URL under test — the test only passed because Moq's loose-mock
    // default already returns false. Stub the call that is actually made so
    // the arranged state matches the test's name and intent.
    // NOTE(review): this test method name also appears twice in SOURCE —
    // one duplicate should be removed or renamed; confirm against full file.
    mockRepo.Setup(m => m.IsBlackListed(uri.GetBaseDomain())).Returns(false);
    mockRepo.Setup(m => m.IsPageProcessed(It.IsAny<string>())).Returns(false);

    // Act
    var processor = new CrawlDaddy(mockProvider.Object, mockRepo.Object);
    var result = processor.IsPageToBeProcessed(uri, code);

    // Assert
    Assert.True(result);
}
/// <summary>
/// A page with an OK status that is neither blacklisted nor already processed
/// should be accepted for processing.
/// </summary>
public void IsPageToBeProcessed_Returns_True_If_Status_Is_Ok_And_Url_Is_Not_Blacklisted_Or_Processed()
{
    // Arrange
    var mockProvider = new Mock<ILogicProvider>();
    var mockRepo = new Mock<IRepository>();
    var uri = new Uri("http://www.x.com");
    var code = HttpStatusCode.OK;

    // FIX: the original stubbed IsPageProcessed("blah"), which can never match
    // the URL under test — the test only passed because Moq's loose-mock
    // default already returns false. Stub the call that is actually made so
    // the arranged state matches the test's name and intent.
    // NOTE(review): this test method name also appears twice in SOURCE —
    // one duplicate should be removed or renamed; confirm against full file.
    mockRepo.Setup(m => m.IsBlackListed(uri.GetBaseDomain())).Returns(false);
    mockRepo.Setup(m => m.IsPageProcessed(It.IsAny<string>())).Returns(false);

    // Act
    var processor = new CrawlDaddy(mockProvider.Object, mockRepo.Object);
    var result = processor.IsPageToBeProcessed(uri, code);

    // Assert
    Assert.True(result);
}
/// <summary>
/// Builds a <see cref="CrawlDaddy"/> over the given repository, initializes it
/// with the seed URL and session/crawler ids, and wires up the static event
/// handlers before returning it to the caller.
/// </summary>
/// <param name="sessionId">Crawl session identifier passed to InitializeCrawler.</param>
/// <param name="crawlerId">Crawler identifier passed to InitializeCrawler.</param>
/// <param name="seedUrl">URL the crawl starts from.</param>
/// <param name="repo">Repository backing the crawler.</param>
/// <returns>The fully initialized crawler.</returns>
public static ICrawlDaddy CreateAndInitCrawler(int sessionId, int crawlerId, string seedUrl, IRepository repo)
{
    var crawler = new CrawlDaddy(new LogicProvider(), repo);
    crawler.InitializeCrawler(seedUrl, sessionId, crawlerId);

    // Subscribe lifecycle notifications before the crawler is handed out so
    // no events can be missed by the caller.
    crawler.DomainCrawlStarted += daddy_DomainCrawlStarting;
    crawler.DomainCrawlEnded += daddy_DomainCrawlEnded;
    crawler.ExternalLinksFound += daddy_ExternalLinksFound;
    crawler.LinkCrawlCompleted += daddy_LinkCrawlCompleted;

    return crawler;
}
/// <summary>
/// Builds a <see cref="CrawlDaddy"/> over the given repository, initializes it
/// with the seed URL and session/crawler ids, and wires up the static event
/// handlers before returning it to the caller.
/// </summary>
/// <param name="sessionId">Crawl session identifier passed to InitializeCrawler.</param>
/// <param name="crawlerId">Crawler identifier passed to InitializeCrawler.</param>
/// <param name="seedUrl">URL the crawl starts from.</param>
/// <param name="repo">Repository backing the crawler.</param>
/// <returns>The fully initialized crawler.</returns>
public static ICrawlDaddy CreateAndInitCrawler(int sessionId, int crawlerId, string seedUrl, IRepository repo)
{
    var daddy = new CrawlDaddy(new LogicProvider(), repo);
    daddy.InitializeCrawler(seedUrl, sessionId, crawlerId);

    // Subscribe lifecycle notifications before the crawler is handed out.
    daddy.DomainCrawlStarted += daddy_DomainCrawlStarting;
    daddy.DomainCrawlEnded += daddy_DomainCrawlEnded;
    daddy.ExternalLinksFound += daddy_ExternalLinksFound;
    daddy.LinkCrawlCompleted += daddy_LinkCrawlCompleted;

    // FIX: was "return(daddy);" — non-idiomatic C# (return is a statement, not
    // a function call) and inconsistent with the sibling copy of this method.
    // NOTE(review): this method appears twice in SOURCE; one duplicate should
    // be removed — confirm against the full file.
    return daddy;
}
/// <summary>
/// A blacklisted URL must be rejected even when the HTTP status is OK, and the
/// blacklist check itself must actually be consulted.
/// </summary>
public void IsPageToBeProcessed_Returns_False_If_Url_Is_Blacklisted()
{
    // Arrange: mark every domain as blacklisted and make the setup verifiable
    // so the test also proves the blacklist lookup was performed.
    var logicProvider = new Mock<ILogicProvider>();
    var repository = new Mock<IRepository>();
    var targetUri = new Uri("http://www.x.com");
    var statusCode = HttpStatusCode.OK;

    repository
        .Setup(r => r.IsBlackListed(It.IsAny<string>()))
        .Returns(true)
        .Verifiable();

    // Act
    var crawler = new CrawlDaddy(logicProvider.Object, repository.Object);
    var shouldProcess = crawler.IsPageToBeProcessed(targetUri, statusCode);

    // Assert: rejected, and the blacklist check was exercised.
    Assert.False(shouldProcess);
    repository.Verify();
}