public async Task<IActionResult> PutRecurringKeyword(int id, RecurringKeyword recurringKeyword)
{
    if (id != recurringKeyword.RecurringKeyworId)
    {
        return BadRequest();
    }

    _context.Entry(recurringKeyword).State = EntityState.Modified;

    try
    {
        await _context.SaveChangesAsync();
    }
    catch (DbUpdateConcurrencyException)
    {
        // Another request may have deleted the keyword while this update was in flight.
        if (!RecurringKeywordExists(id))
        {
            return NotFound();
        }
        else
        {
            throw;
        }
    }

    return NoContent();
}
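// RecurringKeywordExists is referenced above but not shown in this snippet. A minimal
// sketch of what that helper presumably looks like, assuming the standard EF Core
// scaffolded shape (the DbSet and key property names are taken from the calls above):
private bool RecurringKeywordExists(int id)
{
    return _context.RecurringKeyword.Any(e => e.RecurringKeyworId == id);
}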
public async Task<ActionResult<RecurringKeyword>> PostRecurringKeyword(RecurringKeyword recurringKeyword)
{
    if (!UrlHelper.isValidUrl(recurringKeyword.Url) || !UrlHelper.isValidCountryCode(recurringKeyword.CountryDomain))
    {
        return BadRequest();
    }

    if (NewRecurringKeywordExistsAlready(recurringKeyword))
    {
        return Conflict();
    }

    _context.RecurringKeyword.Add(recurringKeyword);
    await _context.SaveChangesAsync();

    // Register a Hangfire recurring job that fetches the keyword's position once a day (Cron.Daily).
    RecurringJobs scrappingInstance = new RecurringJobs(_context, options);
    RecurringJob.AddOrUpdate(
        "RecurringKeyword-" + recurringKeyword.RecurringKeyworId,
        () => scrappingInstance.GoogleScrappingJob(recurringKeyword.Query, recurringKeyword.Url, recurringKeyword.CountryDomain, recurringKeyword.RecurringKeyworId),
        Cron.Daily);

    // Seed fake position data for the previous seven days so the graph has something to show.
    // A single Random instance is reused: creating one per iteration can repeat the same
    // time-based seed and produce identical values.
    Random random = new Random();
    for (int i = 0; i < 7; i++)
    {
        _context.RecurringKeywordPosition.Add(new RecurringKeywordPosition
        {
            RecurringKeyworId = recurringKeyword.RecurringKeyworId,
            Date = DateTime.Today.AddDays(-i - 1),
            // Random position between 1 and 10.
            Positions = Convert.ToInt32(random.NextDouble() * (10 - 1) + 1).ToString()
        });
    }
    await _context.SaveChangesAsync();

    return CreatedAtAction("GetRecurringKeyword", new { id = recurringKeyword.RecurringKeyworId }, recurringKeyword);
}
private bool NewRecurringKeywordExistsAlready(RecurringKeyword recurringKeyword)
{
    return _context.RecurringKeyword.Any(e =>
        e.Query == recurringKeyword.Query &&
        e.Url == recurringKeyword.Url &&
        e.CountryDomain == recurringKeyword.CountryDomain);
}
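// UrlHelper.isValidUrl and UrlHelper.isValidCountryCode are not part of this snippet.
// A minimal sketch of how such a helper could be written, assuming a Uri.TryCreate check
// for the URL and a whitelist of Google country domains; the domain list and the actual
// project's implementation may differ (requires System and System.Collections.Generic).
public static class UrlHelper
{
    private static readonly HashSet<string> CountryDomains =
        new HashSet<string>(StringComparer.OrdinalIgnoreCase) { "com", "fr", "de", "es", "it", "co.uk" };

    public static bool isValidUrl(string url)
    {
        // Accept only absolute http/https URLs.
        return Uri.TryCreate(url, UriKind.Absolute, out Uri result)
               && (result.Scheme == Uri.UriSchemeHttp || result.Scheme == Uri.UriSchemeHttps);
    }

    public static bool isValidCountryCode(string countryDomain)
    {
        return !string.IsNullOrEmpty(countryDomain) && CountryDomains.Contains(countryDomain);
    }
}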