Crawl() public method

public void Crawl()

Returns: void
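
A minimal usage sketch, inferred from the examples below. The Crawler constructor, Browser type, and HttpRequestResults property are assumed from those examples; the seed URL and recursion limit are illustrative values.

        // Crawl a single seed URL and print the URLs that came back.
        // Assumes Crawler(IEnumerable<string>, Browser, int), a default Browser(),
        // and an HttpRequestResults collection, as used in the examples below.
        List<string> seeds = new List<string>();
        seeds.Add("http://localhost/index.html"); // hypothetical seed URL

        Crawler crawler = new Crawler(seeds, new Browser(), 2); // 2 = illustrative recursion limit

        crawler.Crawl(); // runs the crawl; results accumulate on the crawler instance

        foreach (HttpRequestResult result in crawler.HttpRequestResults)
        {
            Console.WriteLine(result.RequestUrl);
        }
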
Example #1
        public void GenerateTests()
        {
            var creationTimestamp = DateTime.Now;
            // Sets up crawlers.
            List<Crawler> crawlers = new List<Crawler>();

            foreach (BrowserConfigElement browserConfig in Config.Browsers)
            {
                Browser browser = new Browser(browserConfig, Config.NetworkCredentials);

                Crawler crawler = new Crawler(Config.Seeds, browser, Config.RecursionLimit);
                crawlers.Add(crawler);
            }

            // Start crawling.
            var results = new List<HttpRequestResult>();

            foreach (var crawler in crawlers)
            {
                crawler.Crawl();
                results.AddRange(crawler.HttpRequestResults);
            }

            var startTime = DateTime.Now;

            // Creates the test results and writes them to a file.
            var file = new FileInfo(Config.TestResultsFile);

            if (file.Exists)
            {
                file.Delete();
            }
            var writer = new WebTestXmlWriter();

            var finishTimestamp = DateTime.Now;

            var testRun = (new TestResultsFactory()).GenerateTestRun(Config.Name,
                                                                     Config.Description,
                                                                     startTime,
                                                                     finishTimestamp,
                                                                     creationTimestamp,
                                                                     startTime,
                                                                     results);

            var streamWriter = file.CreateText();

            writer.Write(streamWriter, testRun);
            streamWriter.Flush();
            streamWriter.Close();
        }
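
Note that this example deletes any existing results file before writing. The StreamWriter could equally be wrapped in a using block so it is disposed even if Write throws; the explicit Flush/Close shown here works as long as no exception occurs in between.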

Example #2

        private Crawler TestCrawlerMethod(string path, int expectedCount, int recursionLimit)
        {
            List<string> uriList = new List<string>();
            uriList.Add(path);

            Crawler crawler = new Crawler(uriList, new Browser(), recursionLimit);

            crawler.Crawl();

            foreach (HttpRequestResult result in crawler.HttpRequestResults)
            {
                try
                {
                    if (result.Error != null)
                        Console.WriteLine("The error property indicated a {1}, at {0} with the message, \"{2}\"", result.Error.AbsoluteUri.ToString() ?? "null", result.Error.HttpCode.ToString() ?? "null", result.Error.Message.ToString() ?? "null");
                    else if (result.ContentType != null && result.IsHtml && result.Content != null)
                        Console.WriteLine("Content for requestUrl, {0}, is as follows:\n{1}", result.RequestUrl, result.Content);
                    else if (result.ContentType == null)
                        Console.WriteLine("ContentType for requestUrl, {0}, is null.", result.RequestUrl);
                    else if (!result.IsHtml)
                        Console.WriteLine("ContentType for requestUrl, {0}, is not html.", result.RequestUrl);
                    else if (result.Content == null)
                        Console.WriteLine("Content for requestUrl, {0}, is null.", result.RequestUrl);
                    else
                        Console.WriteLine("Problem writing result information to console.");
                }
                catch (Exception ex)
                {
                    Console.WriteLine("The following exception occurred while attempting to write information about the reuslt.");
                    Console.WriteLine(ex);
                }
            }

            Assert.AreEqual(expectedCount, crawler.HttpRequestResults.Count);

            AssertLinksFromRemoteSiteNotRetrieved(crawler);
            AssertLinksNullStateForCssAndHtmlTypes(crawler);
            AssertBadLinksHaveNullAbsoluteUriAndPopulatedEx(crawler);

            return crawler;
        }
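
A hypothetical invocation of this helper from a test, assuming a local fixture page; the URL, expected result count, and recursion limit are illustrative values:

        TestCrawlerMethod("http://localhost/fixtures/index.html", 5, 2);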