/// <summary>Creates the controller and stores its injected collaborators.</summary>
/// <param name="rxPriceRepository">Repository used for prescription-price persistence.</param>
/// <param name="priceCalculator">Service that performs price calculations.</param>
/// <param name="webScraper">Injected web scraper.</param>
public RxPricesController(IRxPriceRepository rxPriceRepository, IPriceCalculator priceCalculator, IWebScraper webScraper)
{
    _webScraper = webScraper;
    _priceCalculator = priceCalculator;
    _rxPriceRepository = rxPriceRepository;
}
/// <summary>Creates the manager and captures its injected dependencies.</summary>
/// <param name="menuDao">Data-access object for menus.</param>
/// <param name="dateProvider">Abstraction supplying the current date.</param>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="logger">Logger scoped to this type.</param>
public MenuManager(IMenuDao menuDao, IDateProvider dateProvider, IWebScraper webScraper, ILogger<MenuManager> logger)
{
    this.logger = logger;
    this.webScraper = webScraper;
    this.dateProvider = dateProvider;
    this.menuDao = menuDao;
}
/// <summary>Creates the site wrapper with a required scraper dependency.</summary>
/// <param name="webScraper">Injected web scraper; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="webScraper"/> is null.</exception>
public WDWDiningMenuSite(IWebScraper webScraper)
{
    // nameof keeps the exception's parameter name correct under rename refactors
    // (the original hard-coded the string "webScraper", which matches nameof's output).
    this.webScraper = webScraper ?? throw new ArgumentNullException(nameof(webScraper));
}
/// <summary>Creates the controller and stores its injected services.</summary>
/// <param name="logger">Logger scoped to this controller.</param>
/// <param name="rankedUrlDto">DTO dependency for ranked URLs.</param>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="seoCheckConfig">SEO-check configuration.</param>
public SeoCheckController(ILogger<SeoCheckController> logger, IRankedUrlDto rankedUrlDto, IWebScraper webScraper, ISeoCheckConfig seoCheckConfig)
{
    _seoCheckConfig = seoCheckConfig;
    _webScraper = webScraper;
    _rankedUrlDto = rankedUrlDto;
    _logger = logger;
}
/// <summary>Creates the service and captures its injected dependencies.</summary>
/// <param name="unitOfWorkService">Unit-of-work abstraction.</param>
/// <param name="cacheService">Caching service.</param>
/// <param name="webScraper">Injected web scraper.</param>
public PhoneRankingService(IUnitOfWorkService unitOfWorkService, ICacheService cacheService, IWebScraper webScraper)
{
    _webScraper = webScraper;
    _cacheService = cacheService;
    _unitOfWorkService = unitOfWorkService;
}
/// <summary>Creates the service and stores its injected collaborators.</summary>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="lineMessageService">LINE messaging service.</param>
/// <param name="mongoUnitOfWork">Mongo unit-of-work.</param>
public FundsService(IWebScraper webScraper, ILineMessageService lineMessageService, IMongoUnitOfWork mongoUnitOfWork)
{
    _mongoUnitOfWork = mongoUnitOfWork;
    _lineMessageService = lineMessageService;
    _webScraper = webScraper;
}
/// <summary>Creates the service and stores its injected collaborators.</summary>
/// <param name="cacheService">Caching service.</param>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="lineMessageService">LINE messaging service.</param>
/// <param name="mongoUnitOfWork">Mongo unit-of-work.</param>
public OilPriceService(ICacheService cacheService, IWebScraper webScraper, ILineMessageService lineMessageService, IMongoUnitOfWork mongoUnitOfWork)
{
    _mongoUnitOfWork = mongoUnitOfWork;
    _lineMessageService = lineMessageService;
    _webScraper = webScraper;
    _cacheService = cacheService;
}
/// <summary>
/// Creates the reader and builds its lookup tables: the set of section headers it
/// accepts (Swedish weekday names plus the weekly pasta/salad headers) and a
/// case-insensitive map from weekday name to the matching date of the current week.
/// </summary>
/// <param name="scraper">Scraper forwarded to the base reader.</param>
public CremeFejanReader(IWebScraper scraper) : base(scraper)
{
    // Headers are matched case-insensitively.
    _validHeaders = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
    {
        "Måndag",
        "Tisdag",
        "Onsdag",
        "Torsdag",
        "Fredag",
        VeckansPasta,
        VeckansSallad
    };

    // Weekday name -> concrete date within the current week.
    _dateLookup = new Dictionary<string, DateTime>(StringComparer.OrdinalIgnoreCase)
    {
        ["Måndag"] = DateHelper.MondayThisWeek(),
        ["Tisdag"] = DateHelper.TuesdayThisWeek(),
        ["Onsdag"] = DateHelper.WednesdayThisWeek(),
        ["Torsdag"] = DateHelper.ThursdayThisWeek(),
        ["Fredag"] = DateHelper.FridayThisWeek()
    };
}
/// <summary>Factory method mapping a <see cref="ScraperType"/> to a concrete scraper instance.</summary>
/// <param name="scraperType">The kind of scraper to create.</param>
/// <returns>A new scraper for the requested type; never null.</returns>
/// <exception cref="Exception">
/// Thrown when the type is <see cref="ScraperType.Unidentified"/> or not handled by this factory.
/// The original silently returned null for unhandled values, deferring the failure to a
/// NullReferenceException at the call site; failing here is explicit and immediate.
/// </exception>
public IWebScraper CreateWebScraper(ScraperType scraperType)
{
    switch (scraperType)
    {
        case ScraperType.Unidentified:
            throw new Exception("Scraper Type Must Be Identified");
        case ScraperType.HackerNewsScraper:
            return new HackerNewsScraper();
        default:
            // Fail fast instead of returning null for enum values added later.
            throw new Exception($"Unsupported Scraper Type: {scraperType}");
    }
}
/// <summary>Creates the controller and captures its injected services.</summary>
/// <param name="addService">User-account service.</param>
/// <param name="postsService">Posts service.</param>
/// <param name="userManager">Identity user manager.</param>
/// <param name="searchService">Search service.</param>
/// <param name="hostingEnviroment">Hosting environment (field name retains the existing spelling).</param>
/// <param name="appUserService">Application-user service.</param>
/// <param name="webScraper">Injected web scraper.</param>
public UserAccController(
    IUserAccService addService,
    IPostsService postsService,
    UserManager<ApplicationUser> userManager,
    ISearchService searchService,
    IHostingEnvironment hostingEnviroment,
    IAppUserService appUserService,
    IWebScraper webScraper)
{
    this.webScraper = webScraper;
    this.appUserService = appUserService;
    this.hostingEnviroment = hostingEnviroment;
    this.searchService = searchService;
    this.userManager = userManager;
    this.postsService = postsService;
    this.addService = addService;
}
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public EuropaReader(IWebScraper scraper) : base(scraper) { }
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public TegeluddenReader(IWebScraper scraper) : base(scraper) { }
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public AihayaReader(IWebScraper scraper) : base(scraper) { }
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public CremeReader(IWebScraper scraper) : base(scraper) { }
/// <summary>Creates the repository with its database context, scraper, and configuration.</summary>
/// <param name="context">EF database context for prescription data.</param>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="config">Application configuration.</param>
public RxPriceRepository(RxContext context, IWebScraper webScraper, IConfiguration config)
{
    _config = config;
    _webScraper = webScraper;
    _context = context;
}
/// <summary>Protected pass-through constructor for Sabis-style readers: forwards the scraper to the base class.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
protected SabisReaderBase(IWebScraper scraper) : base(scraper) { }
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public KalasetPaFyranReader(IWebScraper scraper) : base(scraper) { }
// Initializes the webScraper field with a fresh concrete WebScraper.
// NOTE(review): looks like a test-fixture setup method, but any [SetUp]/[TestInitialize]
// attribute is outside this view — confirm against the enclosing class.
public void Setup() { webScraper = new WebScraper(); }
/// <summary>
/// Application entry point. Reads a scraping command from the CLI arguments or the
/// console, validates it, and launches one scraping task per valid command. In
/// interaction mode it keeps prompting until the exit command is entered; it then
/// waits for all launched tasks before shutting down.
/// </summary>
/// <param name="args">Optional command-line arguments joined into a single input string.</param>
static async Task Main(string[] args)
{
    // Single shared HttpClient: reused by every scraping task, disposed in the finally block.
    HttpClient httpClient = new HttpClient();
    // Tracks in-flight scraping tasks so we can await them before exiting.
    List<Task> scrapingExecutions = new List<Task>();
    const string errorMessagePrefix = "Issue During Execution:";
    // Whether the console should remain open after execution (from config).
    bool consoleRemainOpen = false;
    string input = default;
    try
    {
        Console.WriteLine($"Start Of Application Execution: {DateTime.Now}");
        // ToDo - Ensure a default configuration file is created in the required directory if it is not present
        // The configuration manipulator that will interact with the configuration file.
        ConfigManipulator configManipulator = new ConfigManipulator(configurationFilePath: $@"{Directory.GetCurrentDirectory()}\{AppSettings.Default.ConfigFilePath}");
        // Moniker -> (uri, scraper type) mapping loaded from the configuration file.
        Dictionary<string, (string Uri, ScraperType ScraperType)> monikerUriMapping = configManipulator.LoadMonikerUriMappingDictionary();
        // CLI arguments, when present, become the first input line.
        if (args.Length >= 1)
        {
            input = string.Join(Separators.SpaceSeparator, args);
        }
        bool interactionMode = configManipulator.InteractionMode;
        consoleRemainOpen = configManipulator.ConsoleRemainOpen;
        // Validates and parses each input line.
        UserInputProcessor userInputProcessor = new UserInputProcessor();
        // Runs at least once, or continuously while interaction mode is enabled.
        for (int i = 0; i < 1 || interactionMode; i++)
        {
            try
            {
                // Only read from the console if no CLI arguments have been provided.
                if (string.IsNullOrEmpty(input))
                {
                    Console.WriteLine("Enter Argument:");
                    input = Console.ReadLine();
                }
                userInputProcessor.ProcessInput(input);
                // ToDo -- trigger cancellation token if it is able to be canceled to signal task instances they should stop
                if (userInputProcessor.Exit)
                {
                    break;
                }
                if (!userInputProcessor.IsUserInputValid)
                {
                    throw new Exception($"Ensure Input Is Valid: {input}");
                }
                // FIX: snapshot the processor's state on the main thread BEFORE launching the
                // task. The original lambda read userInputProcessor.MonikerArgument /
                // .TrailingArguments from inside Task.Run, racing against the next loop
                // iteration resetting the shared processor (the original's own ToDo flagged this).
                string moniker = userInputProcessor.MonikerArgument;
                Dictionary<string, string> trailingArguments = userInputProcessor.TrailingArguments;
                scrapingExecutions.Add(Task.Run(async () =>
                {
                    try
                    {
                        // TryGetValue replaces Keys.Contains + two indexer reads (single lookup).
                        if (!monikerUriMapping.TryGetValue(moniker, out (string Uri, ScraperType ScraperType) mapping))
                        {
                            throw new Exception("Moniker Not Found In The Config Files Moniker Uri Mapping Property");
                        }
                        // Select the associated scraper type that should be used with the valid moniker argument.
                        switch (mapping.ScraperType)
                        {
                            case ScraperType.HackerNewsScraper:
                                // Initialize algorithm based on selected mapping item.
                                IWebScraper webScraper = ScraperFactory.CreateWebScraperInstance(ScraperType.HackerNewsScraper);
                                await webScraper.ExecuteScraping(mapping.Uri, httpClient, trailingArguments);
                                break;
                            default:
                                // Will throw an exception if the associated web scraper is unidentified.
                                throw new Exception("Provided Web Scraper Is Unidentified");
                        }
                    }
                    catch (Exception ex)
                    {
                        // Log errors raised during this scraping execution without killing the app.
                        Console.WriteLine($"Issue During {moniker} Process: {ex.Message}");
                    }
                }));
            }
            catch (Exception ex)
            {
                // Ensures the application can continue when running in interaction mode.
                Console.WriteLine($"{errorMessagePrefix} {ex.Message}");
            }
            // Clear input so the next iteration prompts again.
            input = string.Empty;
        }
    }
    catch (Exception ex)
    {
        // Display the error that caused main execution to stop.
        Console.WriteLine($"{errorMessagePrefix} {ex.Message}");
    }
    finally
    {
        // Wait for scraping executions to complete (Count property instead of LINQ Count()).
        if (scrapingExecutions.Count >= 1)
        {
            await Task.WhenAll(scrapingExecutions);
        }
        httpClient.Dispose();
    }
    Console.WriteLine($"End Of Application Execution: {DateTime.Now}");
    // Temp - allows viewing of the results before the console closes.
    if (consoleRemainOpen)
    {
        Console.ReadLine();
    }
}
/// <summary>Pass-through constructor for Kvartersmenyn-style readers: forwards the scraper to the base class.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public KvartersmenynReaderBase(IWebScraper scraper) : base(scraper) { }
/// <summary>Creates the service with its repository and scraper dependencies.</summary>
/// <param name="employeesRepository">Repository for employee data.</param>
/// <param name="webScraper">Injected web scraper.</param>
public EmployeesService(IEmployeesRepository employeesRepository, IWebScraper webScraper)
{
    _webScraper = webScraper;
    _employeesRepository = employeesRepository;
}
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public TennishallenReader(IWebScraper scraper) : base(scraper) { }
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public LansrattenReader(IWebScraper scraper) : base(scraper) { }
/// <summary>Creates the controller with its repository, scraper, and mapper dependencies.</summary>
/// <param name="productRepository">Repository for product data.</param>
/// <param name="ceneoWebScraper">Scraper for the Ceneo site.</param>
/// <param name="mapper">Object mapper.</param>
public CeneoController(IProductRepository productRepository, IWebScraper ceneoWebScraper, IMapper mapper)
{
    _mapper = mapper;
    _ceneoWebScraper = ceneoWebScraper;
    _productRepository = productRepository;
}
/// <summary>Base constructor for menu readers: stores the scraper for use by derived types.</summary>
/// <param name="scraper">Scraper exposed via the <c>Scraper</c> property.</param>
protected MenuReaderBase(IWebScraper scraper)
{
    Scraper = scraper;
}
/// <summary>
/// Creates the view model: stores the scraper, immediately logs in and loads the
/// timesheet, and wires the save command.
/// NOTE(review): the login/fetch happens inside the constructor, so constructing this
/// view model performs I/O synchronously — confirm this is intentional.
/// </summary>
/// <param name="webScraper">Scraper used to log in and retrieve the timesheet.</param>
public MainWindowViewModel(IWebScraper webScraper)
{
    _webScraper = webScraper;

    // Eagerly fetch the timesheet via the injected scraper.
    Timesheet = webScraper.LoginAndGetTimesheet();

    // Save is only enabled while there are unsaved changes.
    SaveCommand = new DelegateCommand(_ => Save(), _ => HasChanges());
}
/// <summary>Creates the repository with its scraper and content-parser dependencies.</summary>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="parseContent">Content parser.</param>
public SearchRepository(IWebScraper webScraper, IParseContent parseContent)
{
    _parseContent = parseContent;
    _webScraper = webScraper;
}
/// <summary>Pass-through constructor: forwards the scraper to the base menu reader; no state of its own.</summary>
/// <param name="scraper">Scraper used by the base class.</param>
public KleinsRioReader(IWebScraper scraper) : base(scraper) { }
/// <summary>Creates the job with its storage and scraper dependencies.</summary>
/// <param name="dataStore">Store for scraped results.</param>
/// <param name="webScraper">Injected web scraper.</param>
public ScraperJob(IDataStore dataStore, IWebScraper webScraper)
{
    this.webScraper = webScraper;
    this.dataStore = dataStore;
}
/// <summary>Creates the fetcher and stores its injected dependencies.</summary>
/// <param name="log">Logger scoped to this type.</param>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="drawsRepo">Repository of lottery draws.</param>
public LottoNumberFetcher(ILogger<LottoNumberFetcher> log, IWebScraper webScraper, IDrawsRepo drawsRepo)
{
    _drawsRepo = drawsRepo;
    _webScraper = webScraper;
    _log = log;
}
/// <summary>Creates the downloader with its cookie, scraping, and parsing collaborators.</summary>
/// <param name="cookieProvider">Supplies cookies for requests.</param>
/// <param name="webScraper">Injected web scraper.</param>
/// <param name="websiteParser">Parses downloaded pages.</param>
public Downloader(ICookieProvider cookieProvider, IWebScraper webScraper, IWebsiteParser websiteParser)
{
    _websiteParser = websiteParser;
    _webScraper = webScraper;
    _cookieProvider = cookieProvider;
}