Code example #1
        public void SimpleRegexRulesHttpUpdater_should_not_update_list_when_response_is_not_modified()
        {
            // Arrange
            var anyUrl        = "http://example.com/links-blacklist.txt";
            var originalRegex = new Regex(".*");
            var rules         = new SimpleRegexRules(new[] { originalRegex });
            var originalList  = rules.Blacklist;
            var httpClient    = new HttpClientDouble();

            httpClient.Setup_GetString(new SimplifiedHttpResponse()
            {
                Body        = "0\r\n  1  \n2\r\n3",
                NotModified = true
            });
            var sut = new SimpleRegexRulesHttpUpdater(anyUrl, rules, httpClient);

            Assert.AreEqual(1, rules.Blacklist.Count);

            // Act
            sut.UpdateAsync().Wait();

            // Assert
            Assert.AreEqual(1, rules.Blacklist.Count);
            Assert.AreEqual(originalRegex, rules.Blacklist.First());
            Assert.AreSame(originalList, rules.Blacklist);
        }
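
The examples in this listing rely on test doubles named `HttpClientDouble` and `SimplifiedHttpResponse`. Their definitions are not shown here; the sketch below is only an inference from how they are called (queueing a body, a full response, a factory, or an exception for the next request), so every member not visible in the examples, including the `GetString` method, is an assumption.

// Minimal sketch of the test doubles, inferred from usage in the examples; not the project's actual code.
using System;
using System.Threading.Tasks;

public class SimplifiedHttpResponse
{
    public string Body { get; set; }
    public string Etag { get; set; }
    public bool NotModified { get; set; }
}

public class HttpClientDouble
{
    private Func<SimplifiedHttpResponse> _factory;
    private Exception _exception;

    // Overloads mirroring the Setup_GetString calls in the examples.
    public void Setup_GetString(string body) =>
        Setup_GetString(new SimplifiedHttpResponse() { Body = body });

    public void Setup_GetString(SimplifiedHttpResponse response) =>
        Setup_GetString(() => response);

    public void Setup_GetString(Func<SimplifiedHttpResponse> factory) =>
        _factory = factory;

    public void Setup_GetString(Exception exception) =>
        _exception = exception;

    // Hypothetical member the updater would call; the real interface may differ.
    public Task<SimplifiedHttpResponse> GetString(string url, string etag = null)
    {
        if (_exception != null)
        {
            throw _exception;
        }
        return Task.FromResult(_factory());
    }
}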
Code example #2
        public void SimpleRegexRulesHttpUpdater_should_not_update_rules_when_remote_file_not_found_real_request()
        {
            // Arrange
            var notFoundUrl   = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/notfound";
            var originalRegex = new Regex(".*");
            var rules         = new SimpleRegexRules(new[] { originalRegex });
            var sut           = new SimpleRegexRulesHttpUpdater(notFoundUrl, rules);

            Assert.AreEqual(1, rules.Blacklist.Count);

            // Act
            try
            {
                sut.UpdateAsync().Wait();

                // Assert
                Assert.Fail("Update should throw exception when URL not found");
            }
            catch
            {
                // Assert
                Assert.AreEqual(1, rules.Blacklist.Count);
                Assert.AreEqual(originalRegex, rules.Blacklist.First());
            }
        }
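
A note on the try/catch pattern in code example #2: `Task.Wait()` wraps any failure in an `AggregateException`, which is why the test catches everything instead of a specific exception type. If the project's test framework offers async assertions, the same check can be written without the catch-all; the rewrite below assumes NUnit 3 and `System.Net.Http.HttpRequestException`, neither of which is confirmed by the snippet.

        // Hypothetical async variant, assuming NUnit 3; shown only to illustrate the AggregateException point.
        // Requires: using NUnit.Framework; using System.Linq; using System.Net.Http; using System.Text.RegularExpressions;
        [Test]
        public void UpdateAsync_should_surface_http_failure_and_leave_rules_untouched()
        {
            var notFoundUrl   = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/notfound";
            var originalRegex = new Regex(".*");
            var rules         = new SimpleRegexRules(new[] { originalRegex });
            var sut           = new SimpleRegexRulesHttpUpdater(notFoundUrl, rules);

            // Awaiting the task inside ThrowsAsync surfaces the original exception, not an AggregateException.
            Assert.ThrowsAsync<HttpRequestException>(() => sut.UpdateAsync());

            Assert.AreEqual(1, rules.Blacklist.Count);
            Assert.AreEqual(originalRegex, rules.Blacklist.First());
        }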
Code example #3
        public void SimpleRegexRulesHttpUpdater_should_not_update_rules_when_remote_file_not_found()
        {
            // Arrange
            var anyUrl     = "http://example.com/notfound";
            var rules      = new SimpleRegexRulesDouble();
            var httpClient = new HttpClientDouble();

#if (!NETSTANDARD1_0)
            httpClient.Setup_GetString(new System.Net.Http.HttpRequestException("404 (Not Found)."));
#else
            httpClient.Setup_GetString(new System.Net.WebException("(404) Not Found."));
#endif
            var sut = new SimpleRegexRulesHttpUpdater(anyUrl, rules, httpClient);

            // Act
            try
            {
                sut.UpdateAsync().Wait();

                // Assert
                Assert.Fail("Update should throw exception when URL not found");
            }
            catch
            {
                // Assert
                Assert.AreEqual(0, rules.Count_Update);
            }
        }
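
`SimpleRegexRulesDouble` (used in code examples #3, #5, and #8) appears to be a spy that counts how often the updater pushes new content into the rules. A minimal sketch consistent with the `Count_Update` assertions could look like the following; the virtual `Update(string)` it overrides is an inference from the tests, not a documented member of `SimpleRegexRules`.

// Hypothetical spy; assumes SimpleRegexRules exposes a virtual Update(string) that the updater calls.
public class SimpleRegexRulesDouble : SimpleRegexRules
{
    public int Count_Update { get; private set; }

    public override void Update(string content)
    {
        Count_Update++;          // record each push from the updater
        base.Update(content);    // keep the normal parsing behaviour
    }
}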
Code example #4
        public void SimpleRegexRulesHttpUpdater_should_update_rules_reading_remote_file_real_request()
        {
            // Arrange
            var realUrl = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/links-blacklist.txt";
            var rules   = new SimpleRegexRules();
            var sut     = new SimpleRegexRulesHttpUpdater(realUrl, rules);

            Assert.IsFalse(rules.Blacklist.Any());

            // Act
            sut.UpdateAsync().Wait();

            // Assert
            Assert.IsTrue(rules.Blacklist.Any());
        }
Code example #5
        public void SimpleRegexRulesHttpUpdater_should_update_rules()
        {
            // Arrange
            var rules      = new SimpleRegexRulesDouble();
            var anyUrl     = "http://example.com/rules";
            var httpClient = new HttpClientDouble();

            httpClient.Setup_GetString(".*");
            var sut = new SimpleRegexRulesHttpUpdater(anyUrl, rules, httpClient);

            Assert.AreEqual(0, rules.Count_Update);

            // Act
            sut.UpdateAsync().Wait();

            // Assert
            Assert.AreEqual(1, rules.Count_Update);
        }
Code example #6
        public void RecurringWorker_should_execute_tasks_periodically()
        {
            // Arrange
            var counter    = 0;
            var anyUrl     = "http://example.com/links-blacklist.txt";
            var httpClient = new HttpClientDouble();

            httpClient.Setup_GetString(() =>
            {
                counter++;
                return new SimplifiedHttpResponse()
                {
                    Body        = ".*",
                    Etag        = null,
                    NotModified = false
                };
            });
            var sut = new SimpleRegex.SimpleRegexRulesHttpUpdater(anyUrl, httpClient);

            var due           = TimeSpan.FromMilliseconds(600);
            var period        = TimeSpan.FromMilliseconds(150);
            var testTime      = TimeSpan.FromMilliseconds(1800);
            var expectedCount = 9;
            var tolerance     = 4;

            // Act
            sut.UpdatePeriodically(due, period);
            Assert.AreEqual(0, counter);
            Delay(testTime);

            // Assert
#if (DNXCORE50)
            Assert.InRange(counter, expectedCount - tolerance, expectedCount + tolerance);
#else
            Assert.GreaterOrEqual(counter, expectedCount - tolerance);
            Assert.LessOrEqual(counter, expectedCount + tolerance);
#endif
        }
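
The timing in code example #6 works out as roughly 1 + (1800 - 600) / 150 = 9 executions: one run after the initial due time of 600 ms, then one every 150 ms for the rest of the 1800 ms window, with a tolerance of 4 because timer scheduling is not exact. `Delay` is presumably a blocking helper defined in the test class; a minimal sketch, assuming it simply blocks the test thread, is:

        // Hypothetical helper; assumes Delay only blocks the calling test thread for the given time.
        private static void Delay(TimeSpan time)
        {
            System.Threading.Tasks.Task.Delay(time).Wait();
        }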
Code example #7
        public void SimpleRegexRulesHttpUpdater_should_read_all_remote_lines()
        {
            // Arrange
            var anyUrl     = "http://example.com/links-blacklist.txt";
            var httpClient = new HttpClientDouble();

            httpClient.Setup_GetString("0\r\n  1  \n2\r\n3");
            var sut = new SimpleRegexRulesHttpUpdater(anyUrl, httpClient);

            Assert.IsFalse(sut.Rules.Blacklist.Any());

            // Act
            sut.UpdateAsync().Wait();

            // Assert
            Assert.IsTrue(sut.Rules.Blacklist.Any());
            Assert.AreEqual(4, sut.Rules.Blacklist.Count);
            Assert.AreEqual("0", sut.Rules.Blacklist[0].ToString());
            Assert.AreEqual("1", sut.Rules.Blacklist[1].ToString());
            Assert.AreEqual("2", sut.Rules.Blacklist[2].ToString());
            Assert.AreEqual("3", sut.Rules.Blacklist[3].ToString());
        }
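
The assertions in code example #7 pin down the parsing behaviour: the body is split on both `\r\n` and `\n`, each line is trimmed, and each resulting pattern becomes one `Regex` in `Blacklist`. A sketch of that parsing step under exactly those assumptions (the real implementation may differ, for instance in how it treats blank lines) is:

// Sketch of the line-to-regex parsing the test implies; the class and method names are illustrative only.
using System;
using System.Linq;
using System.Text.RegularExpressions;

internal static class BlacklistParsing
{
    public static Regex[] Parse(string body) =>
        body.Split(new[] { "\r\n", "\n" }, StringSplitOptions.None)
            .Select(line => line.Trim())            // "  1  " becomes "1", as asserted above
            .Where(line => line.Length > 0)         // drop blank lines (assumption)
            .Select(pattern => new Regex(pattern))
            .ToArray();
}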
Code example #8
        public void SimpleRegexRulesHttpUpdater_should_not_update_list_when_response_is_not_modified_real_request()
        {
            // Arrange
            var realUrl = "https://raw.githubusercontent.com/MakingSense/safe-browsing/resources/links-blacklist.txt";
            var rules   = new SimpleRegexRulesDouble();
            var sut     = new SimpleRegexRulesHttpUpdater(realUrl, rules);

            Assert.AreEqual(0, rules.Count_Update);
            sut.UpdateAsync().Wait();
            Assert.AreEqual(1, rules.Count_Update);
            Assert.IsTrue(rules.Blacklist.Any());

            // Act
            sut.UpdateAsync().Wait();

            // Assert
            Assert.AreEqual(1, rules.Count_Update);

            // Act
            sut.UpdateAsync().Wait();

            // Assert
            Assert.AreEqual(1, rules.Count_Update);
        }
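
Code examples #1 and #8 both exercise the not-modified path: when the response reports `NotModified`, the existing `Blacklist` instance must be left untouched, and with a real server the second and third requests come back unchanged because of the stored ETag. The shape of the updater below is only inferred from that observable behaviour (the field names, the `GetString` signature, and `Rules.Update` are all assumptions); it is a sketch, not the project's implementation.

// Hypothetical core of an ETag-aware updater, inferred from the tests above.
using System.Threading.Tasks;

public class ConditionalUpdaterSketch
{
    private readonly string _url;
    private readonly HttpClientDouble _httpClient;   // stands in for whatever HTTP abstraction the project uses
    private string _lastEtag;

    public SimpleRegexRules Rules { get; }

    public ConditionalUpdaterSketch(string url, SimpleRegexRules rules, HttpClientDouble httpClient)
    {
        _url = url;
        Rules = rules;
        _httpClient = httpClient;
    }

    public async Task UpdateAsync()
    {
        // Send the last known ETag so the server (or the test double) can answer "not modified".
        var response = await _httpClient.GetString(_url, _lastEtag);

        if (response.NotModified)
        {
            return;   // leave the existing Blacklist instance untouched, as examples #1 and #8 assert
        }

        _lastEtag = response.Etag;
        Rules.Update(response.Body);   // rebuild the blacklist from the new body
    }
}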