        public void GenerateTests()
        {
            var creationTimestamp = DateTime.Now;
            // Set up one crawler per configured browser.
            List<Crawler> crawlers = new List<Crawler>();
            foreach (BrowserConfigElement browserConfig in Config.Browsers)
            {
                Browser browser = new Browser(browserConfig, Config.NetworkCredentials);

                Crawler crawler = new Crawler(Config.Seeds, browser, Config.RecursionLimit);
                crawlers.Add(crawler);
            }

            // Start crawling.
            var startTime = DateTime.Now;
            var results = new List<HttpRequestResult>();
            foreach (var crawler in crawlers)
            {
                crawler.Crawl();
                results.AddRange(crawler.HttpRequestResults);
            }

            var finishTimestamp = DateTime.Now;

            // Creates the test results and writes them to a file.
            var file = new FileInfo(Config.TestResultsFile);
            if (file.Exists) file.Delete();
            var writer = new WebTestXmlWriter();

            var testRun = (new TestResultsFactory()).GenerateTestRun(Config.Name,
                                                                     Config.Description,
                                                                     startTime,
                                                                     finishTimestamp,
                                                                     creationTimestamp,
                                                                     startTime,
                                                                     results);

            using (var streamWriter = file.CreateText())
            {
                writer.Write(streamWriter, testRun);
                streamWriter.Flush();
            }
        }
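        // Verifies that every parsed link either resolved to an absolute URI (and carries no
        // exception) or failed to resolve (and carries the exception explaining why).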
        private void AssertBadLinksHaveNullAbsoluteUriAndPopulatedEx(Crawler crawler)
        {
            foreach (var httpRequestResult in crawler.HttpRequestResults)
            {
                if (httpRequestResult.Links != null)
                {
                    foreach (var link in httpRequestResult.Links)
                    {
                        Assert.IsTrue((link.AbsoluteUri == null && link.Ex != null) ||
                                      (link.AbsoluteUri != null && link.Ex == null));
                    }
                }
            }
        }
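        // Verifies that Links is non-null for CSS and HTML results and null for every other
        // content type.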
        private void AssertLinksNullStateForCssAndHtmlTypes(Crawler crawler)
        {
            foreach (var httpRequestResult in crawler.HttpRequestResults)
            {
                if (httpRequestResult.IsCss || httpRequestResult.IsHtml)
                {
                    Assert.IsNotNull(httpRequestResult.Links);
                }
                else
                {
                    Assert.IsNull(httpRequestResult.Links);
                }
            }
        }
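        // Verifies that links discovered on pages outside the crawlable hosts were not
        // themselves retrieved, i.e. the crawler does not follow links found on remote sites.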
        private void AssertLinksFromRemoteSiteNotRetrieved(Crawler crawler)
        {
            foreach (var httpRequestResult in crawler.HttpRequestResults)
            {
                if (httpRequestResult.Links != null &&
                    !crawler.GetSetOfCrawlableHosts().Contains(httpRequestResult.ResultUrl.Host))
                {
                    foreach (var link in httpRequestResult.Links)
                    {
                        Assert.IsFalse(link.WasRetrieved);
                    }
                }
            }
        }
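        // Shared helper: crawls a single seed URL with the given recursion limit, logs each
        // result to the console, asserts the expected result count, and checks the link
        // invariants above before returning the crawler for further assertions.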
        private Crawler TestCrawlerMethod(string path, int expectedCount, int recursionLimit)
        {
            List<string> uriList = new List<string> { path };

            Crawler crawler = new Crawler(uriList, new Browser(), recursionLimit);

            crawler.Crawl();

            foreach (HttpRequestResult result in crawler.HttpRequestResults)
            {
                try
                {
                    if (result.Error != null)
                        Console.WriteLine("The error property indicated a {1}, at {0} with the message, \"{2}\"", result.Error.AbsoluteUri.ToString() ?? "null", result.Error.HttpCode.ToString() ?? "null", result.Error.Message.ToString() ?? "null");
                    else if (result.ContentType != null && result.IsHtml && result.Content != null)
                        Console.WriteLine("Content for requestUrl, {0}, is as follows:\n{1}", result.RequestUrl, result.Content);
                    else if (result.ContentType == null)
                        Console.WriteLine("ContentType for requestUrl, {0}, is null.", result.RequestUrl);
                    else if (!result.IsHtml)
                        Console.WriteLine("ContentType for requestUrl, {0}, is not html.", result.RequestUrl);
                    else if (result.Content == null)
                        Console.WriteLine("Content for requestUrl, {0}, is null.", result.RequestUrl);
                    else
                        Console.WriteLine("Problem writing result information to console.");
                }
                catch (Exception ex)
                {
                    Console.WriteLine("The following exception occurred while attempting to write information about the reuslt.");
                    Console.WriteLine(ex);
                }
            
            }

            Assert.AreEqual(expectedCount, crawler.HttpRequestResults.Count);

            AssertLinksFromRemoteSiteNotRetrieved(crawler);
            AssertLinksNullStateForCssAndHtmlTypes(crawler);
            AssertBadLinksHaveNullAbsoluteUriAndPopulatedEx(crawler);

            return crawler;
        }
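        // Illustrative only: a hypothetical test built on the helper above. The seed URL,
        // expected result count, and recursion limit are placeholders, not values used by
        // this project.
        //
        // [TestMethod]
        // public void Crawl_ExampleSeed_ReturnsExpectedResults()
        // {
        //     TestCrawlerMethod("http://localhost/testsite/default.aspx", 12, 3);
        // }

        // Shared helper: crawls a single seed URL on a background thread and fails the test
        // if the crawl does not finish within the acceptable number of milliseconds.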
        private void TestCrawlerPerformanceGeneric(string path, int acceptablePerformance)
        {
            List<string> uriList = new List<string> { path };
            Crawler crawler = new Crawler(uriList, new Browser(), 500);
            Thread thread = new Thread(crawler.Crawl);

            TimeSpan acceptableTimeSpan = TimeSpan.FromMilliseconds(acceptablePerformance);

            Stopwatch watch = Stopwatch.StartNew();
            thread.Start();
            thread.Join(acceptableTimeSpan);
            watch.Stop();

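            // If Join returned because the timeout elapsed, the crawl thread is still alive
            // and the performance target was missed.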
            Assert.IsFalse(thread.IsAlive);

            Console.WriteLine("Elapsed Milliseconds: " + watch.ElapsedMilliseconds);
            Console.WriteLine("crawler.Pages.Count(): " + crawler.HttpRequestResults.Count());
            foreach (var page in crawler.HttpRequestResults)
            {
                if (page != null)
                {
                    Console.Write(page.RequestUrl + " : HttpCode = ");
                    if (page.Error != null)
                        Console.Write(page.Error.HttpCode);
                    else
                        Console.Write("200");

                    Console.WriteLine();
                }
                else
                    Console.WriteLine("Found null page.");
            }

            Assert.IsTrue(watch.ElapsedMilliseconds <= acceptablePerformance);
        }
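        // Illustrative only: a hypothetical performance test built on the helper above. The
        // seed URL and 5-second budget are placeholders, not values used by this project.
        //
        // [TestMethod]
        // public void Crawl_ExampleSeed_CompletesWithinFiveSeconds()
        // {
        //     TestCrawlerPerformanceGeneric("http://localhost/testsite/default.aspx", 5000);
        // }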