/// <summary>
/// Verifies that a failed remote fetch (404) does not apply any update to the rules:
/// <see cref="SimpleRegexRulesHttpUpdater.UpdateAsync"/> must fault and Count_Update stays 0.
/// </summary>
public void SimpleRegexRulesHttpUpdater_should_not_update_rules_when_remote_file_not_found()
{
    // Arrange
    var anyUrl = "http://example.com/notfound";
    var rules = new SimpleRegexRulesDouble();
    var httpClient = new HttpClientDouble();
    // The HTTP double is configured to throw instead of returning content;
    // the exception type differs per target framework.
#if (!NETSTANDARD1_0)
    httpClient.Setup_GetString(new System.Net.Http.HttpRequestException("404 (Not Found)."));
#else
    httpClient.Setup_GetString(new System.Net.WebException("(404) Not Found."));
#endif
    var sut = new SimpleRegexRulesHttpUpdater(anyUrl, rules, httpClient);

    // Act
    try
    {
        sut.UpdateAsync().Wait();

        // Assert
        Assert.Fail("Update should throw exception when URL not found");
    }
    // BUGFIX: the original bare `catch` also swallowed the assertion-failure
    // exception thrown by Assert.Fail above, so the test could never fail even
    // when UpdateAsync did NOT throw. Task.Wait() wraps task faults in an
    // AggregateException, so catching exactly that type lets the framework's
    // assertion exception propagate while still handling the expected fault.
    catch (System.AggregateException)
    {
        // Assert: the rules were never updated by the failed fetch.
        Assert.AreEqual(0, rules.Count_Update);
    }
}
/// <summary>
/// Verifies that a successful fetch applies exactly one update to the rules.
/// </summary>
public void SimpleRegexRulesHttpUpdater_should_update_rules()
{
    // Arrange: the HTTP double returns a single catch-all regex as the remote content.
    var url = "http://example.com/rules";
    var fakeHttpClient = new HttpClientDouble();
    fakeHttpClient.Setup_GetString(".*");
    var fakeRules = new SimpleRegexRulesDouble();
    var sut = new SimpleRegexRulesHttpUpdater(url, fakeRules, fakeHttpClient);

    // Sanity check: nothing has been applied before the update runs.
    Assert.AreEqual(0, fakeRules.Count_Update);

    // Act
    sut.UpdateAsync().Wait();

    // Assert: exactly one update was applied.
    Assert.AreEqual(1, fakeRules.Count_Update);
}
/// <summary>
/// Verifies that repeated fetches of an unchanged remote resource do not
/// re-apply the rules: after the first successful update, Count_Update stays at 1.
/// </summary>
/// <remarks>
/// NOTE(review): this is an integration test against a live GitHub raw URL —
/// it requires network access and may be flaky; consider marking it as such.
/// </remarks>
public void SimpleRegexRulesHttpUpdater_should_not_update_list_when_response_is_not_modified_real_request()
{
    // Arrange
    var blacklistUrl = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/links-blacklist.txt";
    var doubleRules = new SimpleRegexRulesDouble();
    var sut = new SimpleRegexRulesHttpUpdater(blacklistUrl, doubleRules);

    Assert.AreEqual(0, doubleRules.Count_Update);

    // First fetch populates the rules exactly once.
    sut.UpdateAsync().Wait();
    Assert.AreEqual(1, doubleRules.Count_Update);
    Assert.IsTrue(doubleRules.Blacklist.Any());

    // Act + Assert: a second fetch of the unchanged resource must not
    // trigger another update.
    sut.UpdateAsync().Wait();
    Assert.AreEqual(1, doubleRules.Count_Update);

    // Act + Assert: nor must a third.
    sut.UpdateAsync().Wait();
    Assert.AreEqual(1, doubleRules.Count_Update);
}