public void SimpleRegexRulesHttpUpdater_should_not_update_rules_when_remote_file_not_found()
{
    // Arrange
    var anyUrl = "http://example.com/notfound";
    var rules = new SimpleRegexRulesDouble();
    var httpClient = new HttpClientDouble();
#if (!NETSTANDARD1_0)
    httpClient.Setup_GetString(new System.Net.Http.HttpRequestException("404 (Not Found)."));
#else
    httpClient.Setup_GetString(new System.Net.WebException("(404) Not Found."));
#endif
    var sut = new SimpleRegexRulesHttpUpdater(anyUrl, rules, httpClient);

    // Act
    try
    {
        sut.UpdateAsync().Wait();

        // Assert
        Assert.Fail("Update should throw exception when URL not found");
    }
    catch (AggregateException)
    {
        // Catch only the AggregateException thrown by Wait(), so the
        // Assert.Fail above is not swallowed when no exception occurs.
        // Assert
        Assert.AreEqual(0, rules.Count_Update);
    }
}

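// These tests rely on an HttpClientDouble test double whose source is not shown in
// this section. The sketch below is one plausible shape for it, assuming the updater
// consumes an IHttpClient abstraction that returns a SimplifiedHttpResponse; the
// interface name, the GetStringAsync signature, and the response properties are
// assumptions for illustration, not the project's confirmed API.
public class SimplifiedHttpResponse
{
    public string Body { get; set; }
    public string Etag { get; set; }
    public bool NotModified { get; set; }
}

public class HttpClientDouble : IHttpClient
{
    private Func<SimplifiedHttpResponse> _responseFactory;
    private Exception _exception;

    // Canned body with an ordinary (modified) response.
    public void Setup_GetString(string body)
        => _responseFactory = () => new SimplifiedHttpResponse() { Body = body, NotModified = false };

    // Canned response object, enabling NotModified / Etag scenarios.
    public void Setup_GetString(SimplifiedHttpResponse response)
        => _responseFactory = () => response;

    // Fresh response per call, enabling call counting.
    public void Setup_GetString(Func<SimplifiedHttpResponse> responseFactory)
        => _responseFactory = responseFactory;

    // Simulated transport failure.
    public void Setup_GetString(Exception exception)
        => _exception = exception;

    public Task<SimplifiedHttpResponse> GetStringAsync(string url, string etag = null)
    {
        if (_exception != null)
        {
            throw _exception;
        }
        return Task.FromResult(_responseFactory());
    }
}
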
public void SimpleRegexRulesHttpUpdater_should_not_update_rules_when_remote_file_not_found_real_request()
{
    // Arrange
    var notFoundUrl = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/notfound";
    var originalRegex = new Regex(".*");
    var rules = new SimpleRegexRules(new[] { originalRegex });
    var sut = new SimpleRegexRulesHttpUpdater(notFoundUrl, rules);
    Assert.AreEqual(1, rules.Blacklist.Count);

    // Act
    try
    {
        sut.UpdateAsync().Wait();

        // Assert
        Assert.Fail("Update should throw exception when URL not found");
    }
    catch (AggregateException)
    {
        // Catch only the AggregateException thrown by Wait(), so the
        // Assert.Fail above is not swallowed when no exception occurs.
        // Assert
        Assert.AreEqual(1, rules.Blacklist.Count);
        Assert.AreEqual(originalRegex, rules.Blacklist.First());
    }
}

public void SimpleRegexRulesHttpUpdater_should_not_update_list_when_response_is_not_modified()
{
    // Arrange
    var anyUrl = "http://example.com/links-blacklist.txt";
    var originalRegex = new Regex(".*");
    var rules = new SimpleRegexRules(new[] { originalRegex });
    var originalList = rules.Blacklist;
    var httpClient = new HttpClientDouble();
    httpClient.Setup_GetString(new SimplifiedHttpResponse()
    {
        Body = "0\r\n 1 \n2\r\n3",
        NotModified = true
    });
    var sut = new SimpleRegexRulesHttpUpdater(anyUrl, rules, httpClient);
    Assert.AreEqual(1, rules.Blacklist.Count);

    // Act
    sut.UpdateAsync().Wait();

    // Assert
    Assert.AreEqual(1, rules.Blacklist.Count);
    Assert.AreEqual(originalRegex, rules.Blacklist.First());
    Assert.AreSame(originalList, rules.Blacklist);
}

public void RecurringWorker_should_execute_tasks_periodically()
{
    // Arrange
    var counter = 0;
    var due = TimeSpan.FromMilliseconds(600);
    var period = TimeSpan.FromMilliseconds(150);
    var testTime = TimeSpan.FromMilliseconds(1800);
    // First run at 600 ms, then every 150 ms until 1800 ms: 1 + (1800 - 600) / 150 = 9.
    var expectedCount = 9;
    var tolerance = 4;
    Func<Task> testAction = () =>
    {
        counter++;
        return TaskUtilities.CompletedTask;
    };
    using (var sut = new RecurringWorker(testAction, due, period))
    {
        Assert.AreEqual(0, counter);

        // Act
        Delay(testTime);
    }

    // Assert
#if (DNXCORE50)
    Assert.InRange(counter, expectedCount - tolerance, expectedCount + tolerance);
#else
    Assert.GreaterOrEqual(counter, expectedCount - tolerance);
    Assert.LessOrEqual(counter, expectedCount + tolerance);
#endif
}

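// Delay is a helper defined elsewhere in the test suite; a minimal sketch, assuming
// it simply blocks the calling test thread for the given interval:
private static void Delay(TimeSpan timeSpan)
{
    Task.Delay(timeSpan).Wait();
}
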
public void CheckAsync_should_identify_dangerous_urls()
{
    // Arrange
    var sut = new SimpleRegexUrlChecker(new[]
    {
        @"^.*jpe082ver\.info.*$",
        @"^.*ntfl-promo2017\.info.*$"
    });
    var url1 = "http://www.jpe082ver.info/test";
    var url2 = "https://antfl-promo2017.info";

    // Act
    var result1 = sut.CheckAsync(url1).Result;

    // Assert
    Assert.AreEqual(url1, result1.Url);
    Assert.AreEqual(ThreatType.Unknow, result1.ThreatType);
    Assert.IsFalse(result1.IsSafe);

    // Act
    var result2 = sut.CheckAsync(url2).Result;

    // Assert
    Assert.AreEqual(url2, result2.Url);
    Assert.AreEqual(ThreatType.Unknow, result2.ThreatType);
    Assert.IsFalse(result2.IsSafe);
}

public void SimpleRegexRulesHttpUpdater_should_update_rules_reading_remote_file_real_request()
{
    // Arrange
    var realUrl = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/links-blacklist.txt";
    var rules = new SimpleRegexRules();
    var sut = new SimpleRegexRulesHttpUpdater(realUrl, rules);
    Assert.IsFalse(rules.Blacklist.Any());

    // Act
    sut.UpdateAsync().Wait();

    // Assert
    Assert.IsTrue(rules.Blacklist.Any());
}

public void SimpleRegexRulesHttpUpdater_should_update_rules()
{
    // Arrange
    var rules = new SimpleRegexRulesDouble();
    var anyUrl = "http://example.com/rules";
    var httpClient = new HttpClientDouble();
    httpClient.Setup_GetString(".*");
    var sut = new SimpleRegexRulesHttpUpdater(anyUrl, rules, httpClient);
    Assert.AreEqual(0, rules.Count_Update);

    // Act
    sut.UpdateAsync().Wait();

    // Assert
    Assert.AreEqual(1, rules.Count_Update);
}

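// SimpleRegexRulesDouble is another double whose source is not shown here. A minimal
// sketch, assuming SimpleRegexRules declares Update as virtual; the virtual modifier
// and the exact parameter type are assumptions made for this sketch (the tests only
// show that Update accepts a collection of Regex).
public class SimpleRegexRulesDouble : SimpleRegexRules
{
    // Counts how many times the updater pushed a new rule set, so tests can assert
    // update frequency without inspecting the blacklist contents.
    public int Count_Update { get; private set; }

    public override void Update(IEnumerable<Regex> blacklist)
    {
        Count_Update++;
        base.Update(blacklist);
    }
}
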
public void Check_should_identify_safe_urls()
{
    // Arrange
    var sut = new SimpleRegexUrlChecker(new[]
    {
        @"^.*jpe082ver\.info.*$",
        @"^.*ntfl-promo2017\.info.*$"
    });
    var url = "http://www.safe.info/test";

    // Act
    var result = sut.Check(url);

    // Assert
    Assert.AreEqual(url, result.Url);
    Assert.IsTrue(result.IsSafe);
    Assert.AreEqual(ThreatType.NoThreat, result.ThreatType);
}

public void SimpleRegexUrlChecker_should_respond_to_rules_updating()
{
    // Arrange
    var rules = new SimpleRegexRules();
    var sut = new SimpleRegexUrlChecker(rules);
    var url = "http://www.jpe082ver.info/test";
    Assert.IsTrue(sut.Check(url).IsSafe);

    // Act
    rules.Update(new[] { new Regex(@"^.*jpe082ver\.info.*$") });
    var result = sut.Check(url);

    // Assert
    Assert.AreEqual(url, result.Url);
    Assert.AreEqual(ThreatType.Unknow, result.ThreatType);
    Assert.IsFalse(result.IsSafe);
}

public void SimpleRegexRulesHttpUpdater_should_update_rules_periodically()
{
    // Arrange
    var counter = 0;
    var anyUrl = "http://example.com/links-blacklist.txt";
    var httpClient = new HttpClientDouble();
    httpClient.Setup_GetString(() =>
    {
        counter++;
        return new SimplifiedHttpResponse()
        {
            Body = ".*",
            Etag = null,
            NotModified = false
        };
    });
    var sut = new SimpleRegex.SimpleRegexRulesHttpUpdater(anyUrl, httpClient);
    var due = TimeSpan.FromMilliseconds(600);
    var period = TimeSpan.FromMilliseconds(150);
    var testTime = TimeSpan.FromMilliseconds(1800);
    // First run at 600 ms, then every 150 ms until 1800 ms: 1 + (1800 - 600) / 150 = 9.
    var expectedCount = 9;
    var tolerance = 4;

    // Act
    sut.UpdatePeriodically(due, period);
    Assert.AreEqual(0, counter);
    Delay(testTime);

    // Assert
#if (DNXCORE50)
    Assert.InRange(counter, expectedCount - tolerance, expectedCount + tolerance);
#else
    Assert.GreaterOrEqual(counter, expectedCount - tolerance);
    Assert.LessOrEqual(counter, expectedCount + tolerance);
#endif
}

public void SimpleRegexRulesHttpUpdater_should_read_all_remote_lines()
{
    // Arrange
    var anyUrl = "http://example.com/links-blacklist.txt";
    var httpClient = new HttpClientDouble();
    httpClient.Setup_GetString("0\r\n 1 \n2\r\n3");
    var sut = new SimpleRegexRulesHttpUpdater(anyUrl, httpClient);
    Assert.IsFalse(sut.Rules.Blacklist.Any());

    // Act
    sut.UpdateAsync().Wait();

    // Assert
    Assert.IsTrue(sut.Rules.Blacklist.Any());
    Assert.AreEqual(4, sut.Rules.Blacklist.Count);
    Assert.AreEqual("0", sut.Rules.Blacklist[0].ToString());
    Assert.AreEqual("1", sut.Rules.Blacklist[1].ToString());
    Assert.AreEqual("2", sut.Rules.Blacklist[2].ToString());
    Assert.AreEqual("3", sut.Rules.Blacklist[3].ToString());
}

public void SimpleRegexRulesHttpUpdater_should_not_update_list_when_response_is_not_modified_real_request()
{
    // Arrange
    var realUrl = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/links-blacklist.txt";
    var rules = new SimpleRegexRulesDouble();
    var sut = new SimpleRegexRulesHttpUpdater(realUrl, rules);
    Assert.AreEqual(0, rules.Count_Update);
    sut.UpdateAsync().Wait();
    Assert.AreEqual(1, rules.Count_Update);
    Assert.IsTrue(rules.Blacklist.Any());

    // Act
    sut.UpdateAsync().Wait();

    // Assert
    Assert.AreEqual(1, rules.Count_Update);

    // Act
    sut.UpdateAsync().Wait();

    // Assert
    Assert.AreEqual(1, rules.Count_Update);
}