Example #1
        public void does_not_identify_regular_connection_as_crawler()
        {
            var sut     = new UserAgentBasedCrawlerDetector();
            var entry   = new TrafficReportEntry("1.1.1.1", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1", 1, 1);
            var crawler = sut.Recognize(entry);

            crawler.Should().Be(Crawler.Unrecognized);
        }
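Both tests construct a TrafficReportEntry, whose definition is not part of these excerpts. A minimal sketch of a shape that would satisfy them, assuming the first two constructor arguments are the client IP and user agent; the names and the meaning of the two trailing integers are hypothetical:

        // Hypothetical sketch: only the UserAgent property is actually exercised
        // by Recognize below; the remaining members are assumptions.
        public class TrafficReportEntry
        {
            public TrafficReportEntry(string ipAddress, string userAgent, int requests, int rank)
            {
                this.IpAddress = ipAddress;
                this.UserAgent = userAgent;
                this.Requests  = requests;
                this.Rank      = rank;
            }

            public string IpAddress { get; }
            public string UserAgent { get; }
            public int    Requests  { get; }
            public int    Rank      { get; }
        }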
Example #2
        public void detects_googlebot()
        {
            var userAgent = "Googlebot/2.1 (+http://www.google.com/bot.html)".CorrectSpacesInUserAgentBecauseTheyWontWorkInLogParser();
            var sut       = new UserAgentBasedCrawlerDetector();
            var entry     = new TrafficReportEntry("1.1.1.1", userAgent, 1, 1);
            var crawler   = sut.Recognize(entry);

            crawler.Should().Be(Crawler.Googlebot);
        }
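The Googlebot test pipes the signature through a string extension, CorrectSpacesInUserAgentBecauseTheyWontWorkInLogParser, whose implementation is not shown. A minimal sketch of what it might do, assuming the W3C/IIS log convention in which spaces inside the cs(User-Agent) field are stored as '+':

        // Hypothetical sketch: the real extension is not part of these excerpts.
        // It assumes user agents are normalized the way IIS W3C logs record them,
        // with spaces replaced by '+'.
        public static class UserAgentExtensions
        {
            public static string CorrectSpacesInUserAgentBecauseTheyWontWorkInLogParser(this string userAgent)
            {
                return userAgent.Replace(' ', '+');
            }
        }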
Example #3
        public Crawler Recognize(TrafficReportEntry entry)
        {
            // Walk the known-crawler signatures and look for an exact match.
            // Each stored signature is normalized with the same space correction
            // the log entries went through, so plain string equality is enough.
            foreach (var crawler in this.knownCrawlers)
            {
                if (crawler.Key.CorrectSpacesInUserAgentBecauseTheyWontWorkInLogParser() == entry.UserAgent)
                {
                    return crawler.Value;
                }
            }

            return Crawler.Unrecognized;
        }
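Recognize iterates over this.knownCrawlers, a field the excerpt never declares. The .Key/.Value access suggests a map from raw user-agent signatures to Crawler values; a hedged sketch of how the detector might populate it, reusing the Googlebot signature from Example #2 (the field shape is an assumption):

        // Hypothetical sketch: the actual declaration is not part of the excerpt.
        // Requires: using System.Collections.Generic;
        private readonly Dictionary<string, Crawler> knownCrawlers = new Dictionary<string, Crawler>
        {
            { "Googlebot/2.1 (+http://www.google.com/bot.html)", Crawler.Googlebot },
        };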