[Test]
public void New_HTTPClient()
{
    // Creates a client for a known-good URL and verifies that
    // CheckStatus() records an HTTP 200 response.
    HTTPClient http = new HTTPClient("http://google.com");
    http.CheckStatus();
    Assert.That(http.Status, Is.EqualTo(200));
}
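
For context, here is a minimal sketch of the custom HTTPClient wrapper the test assumes; the real class may differ. In this hypothetical version, CheckStatus() issues a lightweight HEAD request and records the numeric status code, including for non-2xx responses:

using System;
using System.Net;

// Hypothetical stand-in for the project's HTTPClient; details assumed.
public class HTTPClient
{
    public Uri URL { get; }
    public int Status { get; private set; }

    public HTTPClient(string url)
    {
        URL = new Uri(url);
    }

    // Sends a HEAD request and stores the HTTP status code.
    public void CheckStatus()
    {
        var request = (HttpWebRequest)WebRequest.Create(URL);
        request.Method = "HEAD";
        try
        {
            using (var response = (HttpWebResponse)request.GetResponse())
            {
                Status = (int)response.StatusCode;
            }
        }
        catch (WebException ex) when (ex.Response is HttpWebResponse error)
        {
            // Non-2xx responses raise WebException; capture the code anyway.
            Status = (int)error.StatusCode;
        }
    }
}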
public void RunCrawler()
{
    if (CheckPreviousState())
    {
        Init();

        // Keep fetching while the frontier still holds unvisited URLs.
        while (CountToVisiteUrls() > 0)
        {
            http = new HTTPClient(FirstUrls());
            http.CheckStatus();
            Parser.ObtainDocumentHTML(http.URL.AbsoluteUri);

            Filter = new FilterURL(Extractor.ExtractUrls(Parser.HtmlDocument), param.Url.Host);

            if (Filter.CleanedUrls != null)
            {
                // Queue every newly discovered URL as unvisited.
                foreach (string url in Filter.CleanedUrls)
                {
                    Uri uri = new Uri(url);
                    if (!_urls.ContainsKey(uri))
                    {
                        _urls.Add(uri, false);
                    }
                }
            }
            else
            {
                Logger.Error("URL list returned from the filter is null", "Filter");
            }

            // Mark the current page as visited even when the filter returns
            // nothing; otherwise FirstUrls() would keep returning the same
            // URL and the loop would never terminate.
            _urls[http.URL] = true;
        }
    }
}
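
The loop's bookkeeping relies on two helpers the snippet doesn't show. Assuming _urls is a Dictionary<Uri, bool> inside the crawler class, mapping each discovered URL to a visited flag, a plausible sketch (names kept as in the crawler, bodies hypothetical) is:

using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical frontier bookkeeping; the real implementations may differ.
private readonly Dictionary<Uri, bool> _urls = new Dictionary<Uri, bool>();

// Number of discovered URLs whose visited flag is still false.
private int CountToVisiteUrls()
{
    return _urls.Count(pair => !pair.Value);
}

// Absolute URI of the first URL that has not been visited yet.
private string FirstUrls()
{
    return _urls.First(pair => !pair.Value).Key.AbsoluteUri;
}

Note that in this sketch FirstUrls() would throw if no unvisited URL remains, which is exactly the case the while guard in RunCrawler() rules out before calling it.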