/// <summary>
/// Scraping a single post should fetch (import) exactly one page.
/// </summary>
public void Scrape_One_ImporterCalledOnce()
{
    // Arrange: first page parses into a full batch of 30 posts.
    var firstPagePosts = new Post[30];
    _parser.Setup(p => p.Parse(_page1)).Returns(firstPagePosts);
    var sut = new HackerNewsScraper(_importer.Object, _parser.Object, _formatter.Object, _exporter.Object);

    // Act
    sut.Scrape(1);

    // Assert: one post fits within the first page, so only one import is needed.
    _importer.Verify(i => i.Import(It.IsAny<string>()), Times.Once);
}
/// <summary>
/// Scraping a single post should pass the formatter a list containing exactly one post.
/// </summary>
public void Scrape_One_FormatterCalledOnceOnListOfCountOne()
{
    // Arrange: the parsed page holds more posts (30) than were requested (1).
    var firstPagePosts = new Post[30];
    _parser.Setup(p => p.Parse(_page1)).Returns(firstPagePosts);
    var sut = new HackerNewsScraper(_importer.Object, _parser.Object, _formatter.Object, _exporter.Object);

    // Act
    sut.Scrape(1);

    // Assert: the formatter receives a list trimmed to the requested count.
    _formatter.Verify(i => i.Format(It.Is<List<Post>>(p => p.Count == 1)), Times.Once);
}
/// <summary>
/// Entry point. Parses the command line ("--posts n"), scrapes the top n
/// Hacker News items, and writes them to stdout as indented JSON ordered by rank.
/// </summary>
/// <param name="args">Expected: ["--posts", n] where n is a positive integer &lt;= 100.</param>
/// <remarks>
/// Exit codes: 0 on success or usage error, 1 on scrape timeout, 2 on any other scrape failure.
/// </remarks>
private static void Main(string[] args)
{
    var numItems = 30;
    // The usage text promises "a positive integer <= 100", so enforce that range
    // here as well (previously any parseable int, including 0/negative/>100, was accepted).
    // StringComparison.OrdinalIgnoreCase replaces the culture-sensitive ToLower() comparison.
    if (args.Length != 2
        || !string.Equals(args[0], "--posts", StringComparison.OrdinalIgnoreCase)
        || !int.TryParse(args[1], out numItems)
        || numItems < 1
        || numItems > 100)
    {
        Console.WriteLine("Usage: hackernews --posts n");
        Console.WriteLine("Where: --posts n........how many posts to print. A positive integer <= 100.");
        Environment.Exit(0);
    }

    var scraper = new HackerNewsScraper();
    // Bound the whole scrape with a single timeout; the token also cancels the Wait below.
    var cts = new CancellationTokenSource(TimeSpan.FromSeconds(_timeoutSecs));
    IList<HnItem> itemResults = new List<HnItem>();
    var scrapeItemsAsync = scraper.ScrapeItemsAsync(numItems, cts);
    try
    {
        scrapeItemsAsync.Wait(cts.Token);
        itemResults = scrapeItemsAsync.Result;
    }
    catch (OperationCanceledException)
    {
        Console.Error.WriteLine("The scrape operation timed out.");
        Environment.Exit(1);
    }
    catch (AggregateException e)
    {
        // InnerException can be null on an AggregateException; fall back to the
        // aggregate's own message instead of throwing a NullReferenceException here.
        Console.Error.WriteLine(e.InnerException?.Message ?? e.Message);
        Environment.Exit(2);
    }

    // Results may arrive out of order; sort by rank and cap at the requested count.
    Console.WriteLine(JsonConvert.SerializeObject(
        itemResults.OrderBy(x => x.Rank).Take(numItems).ToList(),
        Formatting.Indented));
}
/// <summary>
/// Scraping a single post should hand the formatter a list whose first element
/// is the first post produced by the parser.
/// </summary>
public void Scrape_One_FormatterCalledOnceOnListStartingWithFirstPost()
{
    // Arrange: keep a handle on the parsed array so we can compare identity below.
    var parsed = new Post[30];
    _parser.Setup(p => p.Parse(_page1)).Returns(parsed);
    var sut = new HackerNewsScraper(_importer.Object, _parser.Object, _formatter.Object, _exporter.Object);

    // Act
    sut.Scrape(1);

    // Assert: the formatted list starts with the very first parsed post.
    _formatter.Verify(i => i.Format(It.Is<List<Post>>(p => p[0] == parsed[0])), Times.Once);
}
/// <summary>
/// Scraping 100 posts should import four pages, since the stubbed pages yield
/// 30 + 29 + 28 posts (87) after three pages — a fourth fetch is required.
/// </summary>
public void Scrape_OneHundred_ImporterCalledFourTimes()
{
    // Arrange: each successive page parses into slightly fewer posts.
    _parser.Setup(p => p.Parse(_page1)).Returns(new Post[30]);
    _parser.Setup(p => p.Parse(_page2)).Returns(new Post[29]);
    _parser.Setup(p => p.Parse(_page3)).Returns(new Post[28]);
    _parser.Setup(p => p.Parse(_page4)).Returns(new Post[27]);
    var sut = new HackerNewsScraper(_importer.Object, _parser.Object, _formatter.Object, _exporter.Object);

    // Act
    sut.Scrape(100);

    // Assert
    _importer.Verify(i => i.Import(It.IsAny<string>()), Times.Exactly(4));
}
/// <summary>
/// Builds strict mocks for every collaborator and wires them into the scraper
/// under test. Strict behavior ensures any unexpected call fails the test.
/// </summary>
public void SetUp()
{
    // Console/system interactions: all expected calls are stubbed as no-ops.
    this.mockSystemWrapper = new Mock<SystemWrapper>(MockBehavior.Strict);
    this.mockSystemWrapper.Setup(x => x.WriteLine(It.IsNotNull<object>()));
    this.mockSystemWrapper.Setup(x => x.WriteInvalidNumOfPosts());
    this.mockSystemWrapper.Setup(x => x.ExitAppWithErrorCode());
    this.mockSystemWrapper.Setup(x => x.WriteUsageInformation());

    // Serialization: any non-null object serializes to an empty string.
    this.mockJsonWrapper = new Mock<JsonWrapper>(MockBehavior.Strict);
    this.mockJsonWrapper
        .Setup(x => x.SerializeObject(It.IsNotNull<object>()))
        .Returns(string.Empty);

    // Data access: every request for top posts yields an empty list.
    this.mockDataService = new Mock<DataService>(MockBehavior.Strict, null, null);
    this.mockDataService
        .Setup(x => x.GetTopPosts(It.IsAny<int>()))
        .Returns(new List<Post>());

    this.hackerNewsScraper = new HackerNewsScraper(
        this.mockSystemWrapper.Object,
        this.mockDataService.Object,
        this.mockJsonWrapper.Object);
}
/// <summary>
/// Requesting zero posts is invalid and must raise ArgumentOutOfRangeException.
/// </summary>
public void Scrape_Zero_ThrowsArgumentOutOfRangeException()
{
    // Arrange
    var sut = new HackerNewsScraper(_importer.Object, _parser.Object, _formatter.Object, _exporter.Object);

    // Act + Assert
    Assert.ThrowsException<ArgumentOutOfRangeException>(() => sut.Scrape(0));
}
/// <summary>
/// Creates the scraper under test against the mocked HTTP collaborator.
/// </summary>
public void Setup()
{
    this.scraper = new HackerNewsScraper(http.Object);
}