/// <summary>
/// Builds a new <c>ScrapeTask</c>, fully wired with its scheduling rules, for the
/// given billing account. The three rule types (life-cycle, retry, window period)
/// are created alongside the task so they are available for later use — e.g. a
/// retry after a failed scrape.
/// </summary>
/// <param name="billingAccount">Account the task is created for; its id becomes the task's account id.</param>
/// <returns>A new <c>ScrapeTask</c> with a fresh GUID id and prototype rule values.</returns>
/// <remarks>
/// Prototype only: the rule values below are hard-coded placeholders, not derived
/// from real configuration.
/// </remarks>
public ScrapeTask NewScrapeTask(BillingAccount billingAccount)
{
    // Guard: BillingAccountId is dereferenced below, so fail fast with a clear error.
    if (billingAccount == null)
    {
        throw new ArgumentNullException(nameof(billingAccount));
    }

    // Placeholder life-cycle rule (prototype values).
    StatementLifeCycleRule lifeCycleRule = new StatementLifeCycleRule(30, 5, 2);

    // One retry rule per known failure type (prototype retry counts).
    var retryRules = new List<RetryAfterUnsuccessfulScrapeRule>
    {
        new RetryAfterUnsuccessfulScrapeRule(ScheduleErrorType.APSError, 5),
        new RetryAfterUnsuccessfulScrapeRule(ScheduleErrorType.NoNewStatementError, 6),
        new RetryAfterUnsuccessfulScrapeRule(ScheduleErrorType.ScraperError, 7),
    };

    // Scraping is only allowed inside these two daily windows (prototype values).
    var windowPeriods = new List<WindowPeriodRule>
    {
        new WindowPeriodRule(new TimeOfDay(1, 0, 0), new TimeOfDay(6, 0, 0)),
        new WindowPeriodRule(new TimeOfDay(19, 0, 0), new TimeOfDay(23, 0, 0)),
    };

    return new ScrapeTask(
        Guid.NewGuid().ToString(),
        billingAccount.BillingAccountId.ToString(),
        windowPeriods,
        retryRules,
        lifeCycleRule);
}
/// <summary>
/// Handles the settings form POST: validates the model, applies the posted values,
/// and — when the save interval changed — starts the scrape task with the new
/// interval before redirecting to the index page.
/// </summary>
/// <returns>The same page on validation failure; otherwise a redirect to "/Index".</returns>
public IActionResult OnPost()
{
    // Re-render the form with validation messages when the posted model is invalid.
    if (!ModelState.IsValid)
    {
        return Page();
    }

    // Capture the interval that was in effect before the update so a change can be detected.
    var previousSaveTime = _settings.SaveTime;

    _settings.SetSaveTime(SettingsModel.SaveTime);
    _settings.SetDashboardRefreshTime(SettingsModel.DashboardRefreshTime);
    _settings.SetUrl(SettingsModel.Url);

    // NOTE(review): presumably StartScrapeTask (re)starts the background scrape loop
    // so it picks up the new interval — confirm against ScrapeTask's implementation.
    var saveTimeChanged = previousSaveTime != _settings.SaveTime;
    if (saveTimeChanged)
    {
        ScrapeTask.StartScrapeTask(_settings.SaveTime, _settings.Url);
    }

    return RedirectToPage("/Index");
}
/// <summary>
/// Application entry point: optionally overrides the listen address from the single
/// CLI argument, applies database migrations, starts the scrape task when settings
/// are valid, and then runs the web host.
/// </summary>
/// <param name="args">Command-line arguments; exactly one argument is treated as the listen URL.</param>
public static void Main(string[] args)
{
    // A single command-line argument overrides the default listen address.
    if (args.Length == 1)
    {
        _listenUrl = args[0];
        var addressMessage = "Using listen address: " + _listenUrl;
        Console.WriteLine(addressMessage);
    }

    Console.WriteLine(string.Format("Starting DarkStatsCore ({0})...", SettingsLib.VersionInformation));

    // Bring the database schema up to date, then only begin scraping
    // once the stored settings are usable.
    using (var context = new DarkStatsDbContext())
    {
        context.Database.Migrate();

        var settings = new SettingsLib(context);
        if (!settings.InvalidSettings)
        {
            ScrapeTask.StartScrapeTask(settings.SaveTime, settings.Url);
        }
    }

    BuildWebHost(args).Run();
}
/// <summary>
/// Application entry point with structured logging: configures Serilog, optionally
/// overrides the listen address from the single CLI argument, migrates the database,
/// starts the scrape task when settings are valid, and runs the web host.
/// </summary>
/// <param name="args">Command-line arguments; exactly one argument is treated as the listen URL.</param>
/// <returns>0 on clean shutdown, 1 when the host terminates with an unhandled exception.</returns>
public static int Main(string[] args)
{
    // Console sink only; Microsoft framework noise is raised to Warning,
    // and warnings and above go to stderr.
    Log.Logger = new LoggerConfiguration()
        .MinimumLevel.Debug()
        .MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
        .Enrich.FromLogContext()
        .WriteTo.Console(theme: AnsiConsoleTheme.Code, standardErrorFromLevel: LogEventLevel.Warning)
        .CreateLogger();

    // Surface Serilog's own internal errors on stderr instead of swallowing them.
    Serilog.Debugging.SelfLog.Enable(Console.Error);

    try
    {
        // A single command-line argument overrides the default listen address.
        if (args.Length == 1)
        {
            _listenUrl = args[0];
            Log.Information("Using listen address: {ListenUrl}", _listenUrl);
        }

        Log.Information("Starting DarkStatsCore ({Version})...", SettingsLib.VersionInformation);

        // Bring the database schema up to date, then only begin scraping
        // once the stored settings are usable. The context is disposed
        // before the host starts running.
        using (var context = new DarkStatsDbContext())
        {
            context.Database.Migrate();

            var settings = new SettingsLib(context);
            if (!settings.InvalidSettings)
            {
                ScrapeTask.StartScrapeTask(settings.SaveTime, settings.Url);
            }
        }

        BuildWebHost(args).Run();
        return 0;
    }
    catch (Exception ex)
    {
        Log.Fatal(ex, "Host terminated unexpectedly");
        return 1;
    }
    finally
    {
        // Flush buffered log events even on the failure path.
        Log.CloseAndFlush();
    }
}
/// <summary>
/// Persists the given scrape task to the database.
/// </summary>
/// <param name="task">The scrape task to save.</param>
public void SaveScrapeTask(ScrapeTask task)
{
    // TODO: not yet implemented — this will save the scrape task to the db connection.
}