Example No. 1
        public Feed TryScrapeFeed(ScraperConfig config)
        {
            // Copy the browser session's cookies into a CookieContainer so the
            // plain HTTP client below shares the authenticated Selenium session.
            CookieContainer cookieJar = new CookieContainer();
            var             cookies   = WebDriver.Manage().Cookies.AllCookies;

            foreach (var cookie in cookies)
            {
                cookieJar.Add(new Cookie(cookie.Name, cookie.Value, cookie.Path, cookie.Domain));
            }

            // Retry the download a few times before giving up.
            int maxRetries = 5;
            int retry      = 0;

            Logger.Trace("Downloading {0}", config.ApiUrl);

            Feed feed = null;

            while (true)
            {
                if (retry >= maxRetries)
                {
                    throw new ScraperException("Can't download project feed. Retries exceeded", config);
                }

                try
                {
                    using (var client = new CookieWebClient(cookieJar))
                    {
                        // Route the request through the configured proxy, if any.
                        if (!ProxyAddress.IsNullOrEmpty())
                        {
                            client.Proxy = new WebProxy(ProxyAddress);
                        }

                        // Present a desktop Chrome user agent so the server treats
                        // this client like the browser session it borrowed cookies from.
                        var mozillaAgent =
                            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36";
                        client.Headers.Add("User-Agent", mozillaAgent);

                        // Fetch the authenticated resource using the borrowed session cookies
                        string result = client.DownloadString(config.ApiUrl);

                        feed = JsonConvert.DeserializeObject<Feed>(result);
                    }

                    break;
                }
                catch (Exception e)
                {
                    Logger.Trace("Failed to download {0}. Retry {1} of {2}", config.ApiUrl, retry + 1, maxRetries);
                    Logger.Error(e);
                    retry++;

                    // Back off before the next attempt.
                    Task.Delay(TimeSpan.FromSeconds(10)).Wait();
                }
            }

            Logger.Info("Successfully downloaded {0}", config.ApiUrl);

            return feed;
        }
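The CookieWebClient used above is not shown in the listing. A minimal sketch, assuming it is a WebClient subclass that attaches the supplied CookieContainer to every outgoing HTTP request (requires using System; using System.Net;):

        // Hypothetical reconstruction; not part of the original listing.
        public class CookieWebClient : WebClient
        {
            private readonly CookieContainer _cookieJar;

            public CookieWebClient(CookieContainer cookieJar)
            {
                _cookieJar = cookieJar;
            }

            protected override WebRequest GetWebRequest(Uri address)
            {
                var request = base.GetWebRequest(address);

                // Only HTTP(S) requests can carry cookies.
                if (request is HttpWebRequest httpRequest)
                {
                    httpRequest.CookieContainer = _cookieJar;
                }

                return request;
            }
        }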
Example No. 2
        public void Login(ScraperConfig config)
        {
            // Wait briefly for each login control; FindElementWaitSafe returns
            // null instead of throwing when an element is missing.
            var userNameInput = FindElementWaitSafe(By.CssSelector("#username"), 1);
            var passwordInput = FindElementWaitSafe(By.CssSelector("#passwd"), 1);
            var loginBtn      = FindElementWaitSafe(By.CssSelector("#login_btn"), 1);

            if (userNameInput == null || passwordInput == null || loginBtn == null)
            {
                throw new ScraperException("Invalid login page", config);
            }

            // Clear any prefilled values before typing the credentials.
            userNameInput.Clear();
            passwordInput.Clear();

            userNameInput.SendKeys(config.UserName);
            passwordInput.SendKeys(config.Password);

            loginBtn.Click();
        }
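FindElementWaitSafe is another helper that does not appear in the listing. A minimal sketch, assuming it wraps Selenium's WebDriverWait and returns null instead of throwing when the element never appears (requires using System; using OpenQA.Selenium; using OpenQA.Selenium.Support.UI;):

        // Hypothetical reconstruction; not part of the original listing.
        public IWebElement FindElementWaitSafe(By selector, int timeoutInSeconds)
        {
            try
            {
                var wait = new WebDriverWait(WebDriver, TimeSpan.FromSeconds(timeoutInSeconds));

                // WebDriverWait ignores NotFoundException while polling, so this
                // returns only once the element exists or the timeout elapses.
                return wait.Until(driver => driver.FindElement(selector));
            }
            catch (WebDriverTimeoutException)
            {
                // Callers check for null instead of handling an exception.
                return null;
            }
        }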
Example No. 3
        public void GoToDashboard(ScraperConfig config)
        {
            int maxRetries = 5;
            int retry      = 0;

            // Navigate until we land on either the login page or the dashboard
            // itself; intermediate redirects can take more than one attempt.
            while (retry < maxRetries)
            {
                GoToUrl(config.Url);
                if (Regex.IsMatch(WebDriver.Url, @"/login") || WebDriver.Url == config.Url)
                {
                    break;
                }

                retry++;
            }

            // Ending anywhere but the dashboard means we were redirected to the
            // login page, so authenticate and let the session cookies take over.
            if (WebDriver.Url != config.Url)
            {
                Login(config);
            }
        }
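Taken together, a hypothetical call site might look like the sketch below. The scraper variable and the assumption that ScraperConfig exposes settable properties are illustrative, not part of the original listing:

        var config = new ScraperConfig
        {
            Url      = "https://example.com/dashboard", // placeholder values
            ApiUrl   = "https://example.com/api/feed",
            UserName = "user",
            Password = "secret"
        };

        scraper.GoToDashboard(config);             // logs in if redirected to the login page
        Feed feed = scraper.TryScrapeFeed(config); // reuses the browser session over HTTP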
Example No. 4
        public ScraperException(string message, ScraperConfig config) : base(message)
        {
            Config = config;
        }
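Only the constructor appears in the listing; the surrounding class is presumably a thin Exception subclass that carries the offending configuration. A minimal sketch, with the Config property assumed from the assignment above:

        // Hypothetical reconstruction; not part of the original listing.
        public class ScraperException : Exception
        {
            // The configuration in effect when scraping failed, kept for diagnostics.
            public ScraperConfig Config { get; private set; }

            public ScraperException(string message, ScraperConfig config) : base(message)
            {
                Config = config;
            }
        }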