/// <summary>
/// Verifies that ExtractImageUrls returns the src attribute values of all
/// img tags, in document order, from the HTML supplied by the downloader.
/// </summary>
public void ExtractImageUrls_Should_Return_Collection_Of_Image_Src_Content()
{
    // Arrange: stub the downloader so the crawler never touches the network.
    var mock = new Mock<IHtmlDownloader>();
    mock.Setup(h => h.DownloadHtml(It.IsAny<string>()))
        .Returns(
            "<html>" +
            "<img src=\"nakov.png\"/>" +
            "<span>Hello</span>" +
            "<img src=\"courses/inner/background.jpeg\"/>" +
            "</html>");

    var crawler = new Crawler(mock.Object);

    // The two src values, in the order they appear in the stubbed HTML.
    var expectedImageUrls = new[]
    {
        "nakov.png",
        "courses/inner/background.jpeg"
    };

    // Act
    var imageUrls = crawler.ExtractImageUrls(string.Empty)
        .ToList();

    // Assert: AreEqual is order-sensitive, so this also pins document order.
    CollectionAssert.AreEqual(expectedImageUrls, imageUrls);
}
static void Main()
{
    // Crawl the site and print every extracted image URL,
    // left-padded index first (e.g. "0  : http://...").
    var crawler = new Crawler();

    var index = 0;
    foreach (var imageUrl in crawler.ExtractImageUrls("http://clubz.bg/"))
    {
        Console.WriteLine("{0, -3}: {1}", index++, imageUrl);
    }
}
/// <summary>
/// Resolves the HTML provider from the Ninject kernel, crawls the site,
/// and prints every extracted image URL prefixed by its position.
/// </summary>
private static void RunCrawler(StandardKernel kernel)
{
    var crawler = new Crawler(kernel.Get<IHtmlProvider>());

    var position = 0;
    foreach (var imageUrl in crawler.ExtractImageUrls("http://dariknews.bg/"))
    {
        Console.WriteLine("{0, -3}: {1}", position++, imageUrl);
    }
}
/// <summary>
/// Verifies that ExtractImageUrls yields the src values of all img tags
/// in document order.
/// </summary>
public void ExtractImageUrls_Should_Return_Collection_Of_Image_Src_Content()
{
    // Arrange
    var crawler = new Crawler();
    var expectedImageUrls = new[]
    {
        "nakov.png",
        "courses/inner/background.jpeg"
    };

    // Act
    var actualImageUrls = crawler.ExtractImageUrls(string.Empty)
        .ToList();

    // Assert: AreEqual is order-sensitive, so document order is also checked.
    CollectionAssert.AreEqual(expectedImageUrls, actualImageUrls);
}