public IHttpActionResult GeoSpokeTo(int houseNumber = 1, string road = "", string town = "", string state = "", string zip = "")
{
    try
    {
        // Build a Spoketo scraper for the supplied address and return whatever it finds.
        Spoketo myScraper = ScraperFactory.Create<Spoketo>(new CrankyAddress
        {
            state = state,
            city = town,
            road = road,
            housenum = houseNumber
        });

        return Ok(myScraper.ScraperResults());
    }
    catch (Exception ex)
    {
        // Report scraper failures as a "0" result rather than an HTTP error status.
        return Ok(new GenericResult { result = "0", payload = ex.Message });
    }
}
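For reference, here is a minimal sketch of the supporting types this action relies on. Their real definitions are not shown here, so the members below are inferred purely from how they are used in GeoSpokeTo and may be incomplete.

// Inferred from usage above; the actual classes may carry more members.
public class CrankyAddress
{
    public int housenum { get; set; }
    public string road { get; set; }
    public string city { get; set; }
    public string state { get; set; }
}

public class GenericResult
{
    public string result { get; set; }   // "0" is used above to flag a failure
    public string payload { get; set; }  // carries scraper output or an error message
}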
public void Execute(IJobExecutionContext context)
{
    _logger.Trace("Starting scraping job");

    ScraperConfig config = new ScraperConfig
    {
        UserName = _configuration.AppSettings["UserName"],
        Password = _configuration.AppSettings["Password"],
        Url = _configuration.AppSettings["Url"],
        LoginUrl = _configuration.AppSettings["LoginUrl"],
        ApiUrl = _configuration.AppSettings["FeedUrl"],
        Proxies = new[] { "91.142.84.182:3128" } // TODO: File.ReadAllLines(@"Data\fastproxies.txt")
    };

    using (NewsFeedScraper scraper = _scraperFactory.Create(config))
    {
        try
        {
            scraper.GoToDashboard(config);

            // Give the dashboard time to finish loading before scraping.
            Task.Delay(TimeSpan.FromSeconds(10)).Wait();

            Feed feed = scraper.TryScrapeFeed(config);
            if (feed != null && feed.Items != null)
            {
                foreach (var item in feed.Items)
                {
                    _logger.Trace($"{item.Id} - {item.JobString} - {item.Title} - {item.Url} - {item.Description}");
                }

                _dataService.UpdateFeedAsync(feed).Wait();
            }
        }
        catch (Exception e)
        {
            _logger.Error(e);

            // Take a screenshot and save the page source to help diagnose the failure.
            scraper.TakeScreenshot();
            scraper.SaveSource();
        }
    }
}
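The method above implements Quartz.NET's IJob contract (the synchronous void Execute signature points to Quartz.NET 2.x). As a rough sketch of how such a job could be registered with a scheduler: the job class name, trigger interval, and DI-aware job factory below are assumptions for illustration, not taken from the source.

using Quartz;
using Quartz.Impl;

public static class SchedulerSetup
{
    public static void Start()
    {
        // Quartz 2.x: GetDefaultScheduler() is synchronous (it returns a Task in 3.x).
        IScheduler scheduler = StdSchedulerFactory.GetDefaultScheduler();

        // Because the job takes constructor dependencies (_logger, _configuration,
        // _scraperFactory, _dataService), Quartz needs a custom IJobFactory backed
        // by your DI container, e.g. (hypothetical):
        // scheduler.JobFactory = new ContainerJobFactory(container);

        scheduler.Start();

        // "ScrapeFeedJob" is a placeholder; the source only shows the Execute method.
        IJobDetail job = JobBuilder.Create<ScrapeFeedJob>()
            .WithIdentity("scrapeFeedJob")
            .Build();

        // Run immediately, then every 30 minutes (interval chosen arbitrarily).
        ITrigger trigger = TriggerBuilder.Create()
            .WithIdentity("scrapeFeedTrigger")
            .StartNow()
            .WithSimpleSchedule(x => x.WithIntervalInMinutes(30).RepeatForever())
            .Build();

        scheduler.ScheduleJob(job, trigger);
    }
}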