/// <summary>
/// Removes duplicate rows from the database using the website-specific
/// clean-up logic supplied by the scraper.
/// </summary>
/// <param name="scraper">The scraper whose <c>CleanData</c> implementation defines the de-duplication rules for its site.</param>
static void CleanDB(PaginationScraper scraper)
{
    Trace.TraceInformation("Cleaning the DB for duplicate values");
    scraper.CleanData();
}
/// <summary>
/// Writes the scraper's collected data out as a CSV file.
/// </summary>
/// <param name="scraper">The scraper whose <c>ExportToCSV</c> implementation decides which data is exported.</param>
/// <param name="outputFileName">Destination path for the generated CSV file.</param>
static void Export(PaginationScraper scraper, string outputFileName)
{
    Trace.TraceInformation("Starting exporting the data to CSV file");

    // NOTE(review): second argument is an opaque flag — presumably append/overwrite;
    // confirm its meaning against the ExportToCSV signature.
    scraper.ExportToCSV(outputFileName, false);
}
/// <summary>
/// Runs the scrape for the given website scraper.
/// </summary>
/// <param name="scraper">The individual website scraper to start.</param>
static void StartScrape(PaginationScraper scraper)
{
    // Trace for parity with CleanDB/Export so the start of a run
    // is visible in the diagnostic log.
    Trace.TraceInformation("Starting the scrape");
    scraper.Start();
}