/// <summary>
/// Handles the POST for editing an existing <see cref="ScraperModel"/> record.
/// </summary>
/// <param name="id">Route id; must match the bound model's ID.</param>
/// <param name="scraperModel">Form-bound scraper record to persist.</param>
/// <returns>
/// Redirect to Index on success; NotFound when the route id and model ID disagree
/// or the row no longer exists; the edit view again on validation failure.
/// </returns>
public async Task<IActionResult> Edit(int id, [Bind("ID,input,position,paid_ads,date")] ScraperModel scraperModel)
{
    // Guard: the route id must agree with the posted entity's key.
    if (id != scraperModel.ID)
    {
        return NotFound();
    }

    // Validation failure: redisplay the form with the user's input intact.
    if (!ModelState.IsValid)
    {
        return View(scraperModel);
    }

    try
    {
        _context.Update(scraperModel);
        await _context.SaveChangesAsync();
    }
    catch (DbUpdateConcurrencyException)
    {
        // Concurrency conflict: if the row vanished, report 404; otherwise
        // surface the conflict to the caller unchanged.
        if (!ScraperModelExists(scraperModel.ID))
        {
            return NotFound();
        }

        throw;
    }

    return RedirectToAction(nameof(Index));
}
/// <summary>
/// Handles the POST for creating a new <see cref="ScraperModel"/> record.
/// </summary>
/// <param name="scraperModel">Form-bound scraper record to insert.</param>
/// <returns>Redirect to Index on success; the create view again on validation failure.</returns>
public async Task<IActionResult> Create([Bind("ID,input,position,paid_ads,date")] ScraperModel scraperModel)
{
    // Invalid form input: redisplay the create view with the posted values.
    if (!ModelState.IsValid)
    {
        return View(scraperModel);
    }

    _context.Add(scraperModel);
    await _context.SaveChangesAsync();
    return RedirectToAction(nameof(Index));
}
/// <summary>
/// Scrapes a Google results page for the supplied search string, records the
/// 1-based rank of the first organic result that mentions www.infotrack.co.uk,
/// stores a <see cref="ScraperModel"/> row, and renders the "Calc" view.
/// </summary>
/// <param name="searchString">User-supplied search term; null skips the scrape entirely.</param>
/// <returns>The "Calc" view in all cases.</returns>
public async Task<IActionResult> stringUpdate(string searchString)
{
    if (searchString != null)
    {
        ViewBag.test = searchString;

        // Build the Google query URL; only spaces are replaced with '+'.
        // NOTE(review): other reserved characters are not escaped — consider
        // Uri.EscapeDataString for a robust encoding.
        var url = "https://www.google.co.uk/search?q=" + searchString + "&num=100&filter=0&biw=1536&bih=698";
        url = url.Replace(" ", "+");
        ViewBag.newurl = url;

        // Fetch the raw HTML of the results page.
        var data = await HttpClientFactory.Create().GetStringAsync(url);

        // One fragment per organic search result (normal formatting).
        var split_data = data.Split(_Idata.GoogleResultSplit());

        var position = 0; // 1-based rank of the FIRST matching result; 0 = not found
        var scanned = 0;  // number of fragments examined so far

        foreach (string s in split_data)
        {
            scanned += 1;

            // BUGFIX: the original kept scanning after the first hit (it only
            // broke out on a *second* occurrence) and reported an error only
            // when the counter reached exactly 101 — so with fewer than 101
            // fragments no error was ever recorded. Record the first match
            // and stop immediately.
            if (s.Contains("www.infotrack.co.uk"))
            {
                position = scanned;
                ViewBag.google_ns = position;
                break;
            }
        }

        // No match anywhere in the results: report how far we looked.
        if (position == 0)
        {
            ViewBag.google_error = scanned;
        }

        var new_model = new ScraperModel
        {
            input = searchString,
            position = position,
            paid_ads = 0,
            date = DateTime.Now
        };
        _context.Add(new_model);
        await _context.SaveChangesAsync();
    }

    return View("Calc");
}
/// <summary>
/// Returns the registered scraper at the given registry index as a
/// <see cref="ScraperModel"/>.
/// </summary>
/// <param name="id">Zero-based index into the registered-scraper list.</param>
/// <returns>The mapped scraper, or 400 when the index is out of range.</returns>
public ActionResult<ScraperModel> Get(int id)
{
    var scrapers = ScraperManager.Instance.GetRegisteredScrapers().ToList();

    // BUGFIX: also reject negative indexes — the original only checked the
    // upper bound, so a negative id made scrapers[id] throw
    // ArgumentOutOfRangeException (a 500) instead of returning 400.
    if (id < 0 || id >= scrapers.Count)
    {
        return BadRequest("Invalid scraper index");
    }

    return ScraperModel.CreateFromScraper(scrapers[id]);
}
/// <summary>
/// Maps the current row of an <see cref="IDataReader"/> onto a
/// <see cref="ScraperModel"/>. Columns are read positionally:
/// 0 = Id, 1 = Header, 2 = Description, 3 = Image.
/// </summary>
/// <param name="reader">Reader positioned on the row to map.</param>
/// <returns>A populated model for that row.</returns>
public static ScraperModel DisplayAllMapper(IDataReader reader)
{
    return new ScraperModel
    {
        Id = reader.GetInt32(0),
        Header = reader.GetString(1),
        Description = reader.GetString(2),
        Image = reader.GetString(3)
    };
}
/// <summary>
/// Creates and registers a scraper of the requested class, then returns
/// 201 Created pointing at <see cref="Get"/> with the new registry index.
/// </summary>
/// <param name="arg">Carries the scraper class name (case sensitive) and its constructor arguments.</param>
/// <returns>201 with the created scraper, or 400 when the class cannot be resolved.</returns>
public IActionResult Post(ScraperModel arg)
{
    // NOTE(review): Type.GetType on caller-supplied input lets a client name
    // any loadable type — consider validating against an allow-list of
    // scraper classes.
    var scraperType = Type.GetType(arg.ClassName);
    if (scraperType == null)
    {
        return BadRequest($"Scraper class {arg.ClassName} does not exist. The argument is case sensitive.");
    }

    IScraper created = ScraperManager.Instance.CreateAndRegisterScraper(scraperType, arg.Arguments);

    // The new scraper's position in the registry becomes its resource id.
    var newIndex = ScraperManager.Instance.GetRegisteredScrapers().ToList().IndexOf(created);
    return CreatedAtAction(nameof(Get), new { id = newIndex }, created);
}
/// <summary>
/// Loads every scraper row via the WebScraper_SelectAll stored procedure.
/// </summary>
/// <returns>All rows mapped through <see cref="DisplayAllMapper"/>; empty list when none.</returns>
public List<ScraperModel> DisplayAll()
{
    var rows = new List<ScraperModel>();

    _dataProvider.ExecuteCmd(
        "WebScraper_SelectAll",
        inputParamMapper: null,
        singleRecordMapper: (IDataReader reader, short set) =>
        {
            rows.Add(DisplayAllMapper(reader));
        }
    );

    return rows;
}
/// <summary>
/// Scrapes the configured Google results page, records where the Infotrack URL
/// last appears among the organic results and how many paid-ad fragments the
/// page splits into, stores a <see cref="ScraperModel"/> row, and renders the view.
/// </summary>
/// <param name="BREF">Not read by the current implementation (kept for route/form compatibility).</param>
/// <param name="scraper">Not read by the current implementation (kept for model-binding compatibility).</param>
/// <returns>The default view.</returns>
public async Task<IActionResult> Calc(string BREF, ScraperModel scraper)
{
    // Fetch the raw HTML of the configured Google search page.
    var data = await HttpClientFactory.Create().GetStringAsync(_Idata.GoogleURL());

    // One fragment per organic result / per paid-ad marker.
    var organicResults = data.Split(_Idata.GoogleResultSplit());
    var paidAdFragments = data.Split(_Idata.PaidAdSplit());

    var rank = 0;
    var position = 0; // rank of the LAST matching organic result (0 = no match), as in the original

    foreach (string fragment in organicResults)
    {
        rank += 1;
        try
        {
            if (fragment.Contains(_Idata.InfotrackURL()))
            {
                position = rank;
                ViewBag.google = position;
            }
        }
        catch (Exception ex)
        {
            // Best-effort: log and keep scanning the remaining fragments.
            Console.WriteLine(ex.Message); //at a later date - map exception to a different view
        }
    }

    // FIX(idiom): the original counted fragments with an empty foreach loop;
    // the array length is the same number. NOTE(review): String.Split returns
    // one more part than there are separators, so this is likely (ads + 1) —
    // confirm against _Idata.PaidAdSplit()'s semantics before changing it.
    var paidAdCount = paidAdFragments.Length;
    ViewBag.ads = paidAdCount;
    ViewBag.total = position - paidAdCount;

    var newModel = new ScraperModel
    {
        input = "infotrack",
        position = position,
        paid_ads = paidAdCount,
        date = DateTime.Now
    };
    _context.Add(newModel);
    await _context.SaveChangesAsync();

    return View();
}
/// <summary>
/// Fetches a single scraper row by primary key via the WebScraper_SelectById
/// stored procedure.
/// </summary>
/// <param name="id">Primary key of the row to load.</param>
/// <returns>The mapped row, or null when no record matches.</returns>
public ScraperModel SelectById(int id)
{
    ScraperModel result = null;

    _dataProvider.ExecuteCmd(
        "WebScraper_SelectById",
        inputParamMapper: (SqlParameterCollection paramCol) =>
        {
            paramCol.AddWithValue("@Id", id);
        },
        singleRecordMapper: (IDataReader reader, short set) =>
        {
            result = DisplayAllMapper(reader);
        }
    );

    return result;
}
/// <summary>
/// Seeds the scraper database with sample rows when it is empty.
/// Creates the database if it does not exist; a no-op when data is present.
/// </summary>
/// <param name="context">EF context for the scraper store.</param>
public static void Initialize(ScraperContext context)
{
    context.Database.EnsureCreated();

    // Already seeded — nothing to do.
    if (context.ScraperData.Any())
    {
        return;
    }

    // All sample rows share the same date.
    var seedDate = DateTime.Parse("2005-09-01");
    var seedRows = new ScraperModel[]
    {
        new ScraperModel { ID = 1, input = "infotrack", position = 15, paid_ads = 3, date = seedDate },
        new ScraperModel { ID = 2, input = "infotrack", position = 11, paid_ads = 1, date = seedDate },
        new ScraperModel { ID = 3, input = "infotrack", position = 17, paid_ads = 6, date = seedDate },
        new ScraperModel { ID = 4, input = "infotrack", position = 12, paid_ads = 3, date = seedDate },
        new ScraperModel { ID = 5, input = "infotrack", position = 11, paid_ads = 3, date = seedDate },
        new ScraperModel { ID = 6, input = "infotrack", position = 8, paid_ads = 3, date = seedDate },
    };

    context.ScraperData.AddRange(seedRows);
    context.SaveChanges();
}
/// <summary>
/// Lists every registered scraper, lazily projected to <see cref="ScraperModel"/>.
/// </summary>
/// <returns>A lazily-evaluated sequence, one model per registered scraper.</returns>
public IEnumerable<ScraperModel> Index()
{
    foreach (var registered in ScraperManager.Instance.GetRegisteredScrapers())
    {
        yield return ScraperModel.CreateFromScraper(registered);
    }
}