 /// <summary>
 /// Sends a GET request to the given URI and passes the response body to the callback.
 /// </summary>
 /// <param name="uri">URI to request</param>
 /// <param name="host">Value for the Host header; optional</param>
 /// <param name="cookie">Cookie used for authentication; may be null</param>
 /// <param name="referer">Value for the Referer header; optional</param>
 /// <param name="action">Callback invoked with the response wrapped in a <see cref="PostResult"/></param>
 /// <param name="userAgent">Value for the User-Agent header</param>
 /// <remarks>Requires refactoring: make it more general and replace CrawlString with it.</remarks>
 public static void Get(string uri, string host, Cookie cookie, string referer, Action<PostResult> action, string userAgent)
 {
     using (var client = new WebClientEx())
     {
         if (cookie != null)
         {
             client.CookieContainer.Add(cookie);
         }
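         // Browser-like default headers; the caller supplies the User-Agent and
         // the optional Referer and Host values.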
         client.Headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
         client.Headers["Accept-Language"] = "de-de,de;q=0.8,en-us;q=0.5,en;q=0.3";
         client.Headers["Cache-Control"] = "no-cache";
         client.Headers["User-Agent"] = userAgent;
         if (!String.IsNullOrEmpty(referer))
         {
             client.Headers["Referer"] = referer;
         }
         if (!String.IsNullOrEmpty(host))
         {
             client.Headers["Host"] = host;
         }
         client.Headers["X-Requested-With"] = "XMLHttpRequest";
         // Perform the request synchronously and hand the response body to the caller.
         var response = client.DownloadString(uri);
         var result = new PostResult { Result = response };
         action(result);
     }
 }
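
 /// <summary>
 /// Usage sketch (illustrative, not part of the original API): fetches a page via
 /// <see cref="Get"/> and writes the response body to the console. The URI, referer
 /// and user-agent values below are placeholder assumptions.
 /// </summary>
 public static void GetExample()
 {
     Get(
         "https://example.com/items",                     // placeholder URI
         "example.com",                                   // Host header
         null,                                            // no authentication cookie
         "https://example.com/",                          // Referer header
         result => Console.WriteLine(result.Result),      // callback receives the response body
         "Mozilla/5.0 (compatible; ExampleClient/1.0)");  // placeholder User-Agent
 }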

 /// <summary>
 /// Stores the trading post session cookie on the internal client so that
 /// subsequent requests made with it are authenticated.
 /// </summary>
 /// <param name="sessionKey">Value of the "s" session cookie</param>
 public void EatCookie(string sessionKey)
 {
     _client = new WebClientEx();
     var sessionCookie = new Cookie("s", sessionKey, "/", TradingPostHost);
     _client.CookieContainer.Add(sessionCookie);
 }