public async Task TestScrapeCourses()
{
    // Serve the Fall 2019 Computing schedule fixture; any unexpected request throws.
    var htmlClient = new Fake.FakeHtmlClient()
        .OnGet(Urls.ProfessorPage(Sandy.NNumber), File.ReadAllText("Web/Pages/ComputingFall2019.html"))
        .DefaultToException();

    var scraper = new ScraperEnv(htmlClient)
        .SaveCourses(out var courses)
        .Scraper();

    var res = await scraper.ScrapeCoursesAsync(ComputingDepartment, Fall2019);

    Assert.False(res.IsError);
    Assert.True(courses.Distinct().Count() == 51);
    Assert.True(scraper.Errors.None());
}
public async Task TestScrapeTerms()
{
    // Serve the department schedule fixture; any unexpected request throws.
    var htmlClient = new Fake.FakeHtmlClient()
        .OnGet(Urls.ProfessorPage(Sandy.NNumber), File.ReadAllText("Web/Pages/DepartmentSchedule.html"))
        .DefaultToException();

    var scraper = new ScraperEnv(htmlClient)
        .SaveTerms(out var terms)
        .Scraper();

    var res = await scraper.ScrapeTermsAsync();

    Assert.False(res.IsError);
    Assert.True(terms.Any());
    Assert.True(scraper.Errors.None());
}
public async Task TestScrapeProfessorEntries()
{
    // Serve Sandy's professor page fixture; any unexpected request throws.
    var htmlClient = new Fake.FakeHtmlClient()
        .OnGet(Urls.ProfessorPage(Sandy.NNumber), File.ReadAllText("Web/Pages/TestScrapeProfessorEntriesPage.html"))
        .DefaultToException();

    var scraper = new ScraperEnv(htmlClient)
        .SaveEntries(out var entries)
        .Scraper();

    var res = await scraper.ScrapeProfessorEntriesAsync(Sandy);

    Assert.False(res.IsError);
    Assert.True(entries.Count == 30);

    // Every scraped entry should be fully linked to its course, professor, and term.
    Assert.True(entries.All(x => x.Course != null));
    Assert.True(entries.All(x => x.Professor != null));
    Assert.True(entries.All(x => x.Term != null));
}
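// The tests above all drive the scraper through Fake.FakeHtmlClient, whose source
// is not shown here. As a rough illustration only, a minimal stub-by-URL client
// supporting the fluent OnGet/DefaultToException pattern could look like the
// sketch below. Everything except the OnGet and DefaultToException names is
// hypothetical, and the real fake also supports live-web fallback, counting, and
// serialization, which this sketch omits.
// (Assumes using System, System.Collections.Generic, and System.Threading.Tasks.)
public class MinimalFakeHtmlClient
{
    // Canned HTML responses keyed by request URL. (Hypothetical field.)
    private readonly Dictionary<string, string> _responses = new Dictionary<string, string>();
    private bool _throwOnMiss;

    // Register a canned response for a GET of the given URL.
    public MinimalFakeHtmlClient OnGet(string url, string html)
    {
        _responses[url] = html;
        return this; // fluent, matching the usage in the tests above
    }

    // After this call, any URL without a registered response throws,
    // which makes a test fail fast when the scraper hits an unexpected page.
    public MinimalFakeHtmlClient DefaultToException()
    {
        _throwOnMiss = true;
        return this;
    }

    // Return the canned HTML, or throw on a miss when configured to.
    public Task<string> GetAsync(string url)
    {
        if (_responses.TryGetValue(url, out var html))
            return Task.FromResult(html);
        if (_throwOnMiss)
            throw new InvalidOperationException($"No canned response for {url}");
        return Task.FromResult(string.Empty);
    }
}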
public async Task Setup()
{
    // Walk up from bin/<config>/<tfm> to the project root so relative fixture paths resolve.
    var baseDirectory = Directory.GetParent(AppDomain.CurrentDomain.BaseDirectory)?.Parent?.Parent?.Parent?.FullName
        ?? throw new DirectoryNotFoundException("Could not locate the project root directory.");
    Directory.SetCurrentDirectory(baseDirectory);

    if (File.Exists(HtmlJsonPath))
    {
        // Load the cached HTML responses from the compressed snapshot on disk.
        var bytes = File.ReadAllBytes(HtmlJsonPath);
        var str = await Fake.Compressor.Decompress(bytes);
        _client = await Fake.FakeHtmlClient.Deserialize(str);
    }
    else
    {
        _client = new Fake.FakeHtmlClient();
    }

    // Remember how many responses were cached so we can tell later whether new ones were recorded.
    _countOrig = _client.Count;

    // Unrecognized URLs fall through to the live web instead of throwing.
    _client.DefaultToWeb();
}
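// Setup only ever reads the snapshot; presumably a teardown counterpart writes
// newly recorded responses back to disk so later runs stay offline. A sketch of
// what that could look like, assuming hypothetical Serialize and Compress
// counterparts to the Deserialize and Decompress calls used above:
public async Task Teardown()
{
    // Only rewrite the snapshot when live-web fallback actually recorded new responses.
    if (_client.Count == _countOrig)
        return;

    var str = await _client.Serialize();              // hypothetical inverse of FakeHtmlClient.Deserialize
    var bytes = await Fake.Compressor.Compress(str);  // hypothetical inverse of Compressor.Decompress
    File.WriteAllBytes(HtmlJsonPath, bytes);
}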