/// <summary>
/// Scraper background loop: while not cancelled, acquires a distributed lock for this
/// scraper type, runs pre-scrape tests, executes the scrape under a timing metric,
/// then sleeps for the configured interval. Errors are counted and logged, never fatal.
/// </summary>
/// <param name="stoppingToken">Signalled when the host is shutting down; cancels the delay and in-flight work.</param>
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    while (!stoppingToken.IsCancellationRequested)
    {
        // only attempt a scrape when enabled; the interval delay below runs either way
        // (replaces the previous `goto sleep` with structured control flow)
        if (Enabled)
        {
            try
            {
                // distributed lock prevents concurrent scrapes of the same type
                await using (await _locker.EnterAsync($"scrape:{Type}", stoppingToken))
                {
                    _logger.LogDebug($"Begin {Type} scrape.");

                    // run tests before scraping
                    await TestAsync(stoppingToken);

                    // measure scrape duration, labelled by scraper type
                    using (_scrapingTime.Labels(Type.ToString()).Measure())
                        await RunAsync(stoppingToken);

                    _logger.LogDebug($"End {Type} scrape.");
                }
            }
            catch (Exception e)
            {
                // count the failure and keep looping; the next iteration retries after the delay
                _scrapeErrors.Labels(Type.ToString()).Inc();
                _logger.LogWarning(e, $"Exception while scraping {Type}.");
            }
        }

        // cancellation here surfaces as OperationCanceledException, ending the service loop
        await Task.Delay(_options.CurrentValue.Interval, stoppingToken);
    }
}
/// <summary>
/// Indexes the given books: locally dry-merges duplicates, then merges against the
/// index and creates the results, all under a global book-indexing lock.
/// </summary>
/// <param name="books">Books to index; may contain entries that merge into one another.</param>
/// <param name="cancellationToken">Cancels locking, merging and creation.</param>
public async Task IndexAsync(DbBook[] books, CancellationToken cancellationToken = default)
{
    books = DryMerge(books);

    // must prevent concurrent book indexing because unique books are created in bulk
    await using var indexLock = await _locker.EnterAsync("index:book", cancellationToken);

    // refresh immediately to speed up indexing
    using var indexingScope = _client.UseIndexingOptions(new IndexingOptions { Refresh = Refresh.True });

    var merged = await MergeAsync(books, cancellationToken);

    await CreateAsync(merged, cancellationToken);
}
/// <summary>
/// Stateful scraper background loop: while not cancelled, acquires the write-control
/// guard and a distributed per-type lock, runs pre-scrape tests, loads persisted state,
/// executes the scrape under a timing metric, and persists the updated state.
/// Errors are reported to Sentry, counted, and logged; the loop always continues.
/// </summary>
/// <param name="stoppingToken">Signalled when the host is shutting down; cancels the delay and in-flight work.</param>
protected sealed override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    while (!stoppingToken.IsCancellationRequested)
    {
        // only attempt a scrape when enabled; the interval delay below runs either way
        // (replaces the previous `goto sleep` with structured control flow)
        if (Enabled)
        {
            try
            {
                // write control is entered first, then the distributed per-type lock
                await using (await _writeControl.EnterAsync(stoppingToken))
                await using (await _locker.EnterAsync($"scrape:{Type}", stoppingToken))
                {
                    _logger.LogDebug($"Begin {Type} scrape.");

                    // run tests before scrape
                    await TestAsync(stoppingToken);

                    // load last state from storage, falling back to a fresh state
                    var state = await _storage.ReadObjectAsync<TState>($"scrapers/{Type}/state", stoppingToken) ?? new TState();

                    // execute scraper, measuring duration labelled by type
                    using (ScraperMetrics.ScrapingTime.Labels(Type.ToString()).Measure())
                        await RunAsync(state, stoppingToken);

                    // save state (RunAsync mutates it in place)
                    await _storage.WriteObjectAsync($"scrapers/{Type}/state", state, stoppingToken);

                    // guard the serialization cost behind the log-level check
                    if (_logger.IsEnabled(LogLevel.Debug))
                        _logger.LogDebug($"Saved scraper state {Type}: {JsonConvert.SerializeObject(state)}");

                    _logger.LogDebug($"End {Type} scrape.");
                }
            }
            catch (Exception e)
            {
                // report, count and log; the next iteration retries after the delay
                SentrySdk.CaptureException(e);
                ScraperMetrics.ScrapeErrors.Labels(Type.ToString()).Inc();
                _logger.LogWarning(e, $"Exception while scraping {Type}.");
            }
        }

        // cancellation here surfaces as OperationCanceledException, ending the service loop
        await Task.Delay(_options.CurrentValue.Interval, stoppingToken);
    }
}
/// <summary>
/// Returns the database user linked to the given Discord OAuth user, creating a new
/// user (with a registration snapshot) when none exists yet.
/// </summary>
/// <param name="user">OAuth profile received from Discord.</param>
/// <param name="cancellationToken">Cancels locking, lookup, update and creation.</param>
/// <returns>The up-to-date database user.</returns>
public async Task<DbUser> GetOrCreateUserAsync(DiscordOAuthUser user, CancellationToken cancellationToken = default)
{
    // serialize per Discord user id so concurrent logins cannot create duplicates
    await using (await _locker.EnterAsync($"oauth:discord:{user.Id}", cancellationToken))
    {
        var entry = await GetByIdAsync(user.Id, cancellationToken);

        if (entry == null)
        {
            // no existing user: build a fresh one from the OAuth profile
            entry = _client.Entry(_users.MakeUserObject());

            user.ApplyOn(entry.Value);

            await entry.CreateAsync(cancellationToken);

            // record a snapshot of the freshly registered user
            await _snapshots.CreateAsync(entry.Value, new SnapshotArgs
            {
                Source    = SnapshotSource.User,
                Committer = entry.Value,
                Event     = SnapshotEvent.AfterCreation,
                Reason    = $"Registered via Discord OAuth2 '{user.Username}'."
            }, cancellationToken);
        }
        else
        {
            // existing user: apply the latest OAuth info, retrying until the
            // optimistic-concurrency update succeeds
            while (true)
            {
                user.ApplyOn(entry.Value);

                if (await entry.TryUpdateAsync(cancellationToken))
                    break;
            }
        }

        return entry.Value;
    }
}
/// <summary>
/// Applies all pending index migrations in ascending id order. Database writes are
/// blocked for the duration and the work is serialized under a distributed lock.
/// On a failed migration, any indexes it created are deleted (best-effort) and no
/// further migrations are attempted.
/// </summary>
/// <param name="cancellationToken">Cancels locking, the Elasticsearch calls and each migration.</param>
public async Task RunAsync(CancellationToken cancellationToken = default)
{
    // block database writes
    await _writeControl.BlockAsync(cancellationToken);

    await using (await _locker.EnterAsync("maintenance:migrations", cancellationToken))
    {
        var options = _options.CurrentValue;
        var indexes = await _elastic.RequestAsync(c => c.Cat.IndicesAsync(cc => cc.Index($"{options.IndexPrefix}*"), cancellationToken));

        var count = 0;

        // not all indexes get migrated every migration, so use the max;
        // DefaultIfEmpty(0) guards against an empty cluster, where Max() would throw
        var lastMigrationId = indexes.Records
                                     .Select(r => TryParseIndexName(r.Index, out _, out var migrationId) ? migrationId : 0)
                                     .DefaultIfEmpty(0)
                                     .Max();

        foreach (var nextMigrationId in MigrationTypes.Keys.OrderBy(x => x))
        {
            // skip migrations that were already applied
            if (nextMigrationId <= lastMigrationId)
                continue;

            var migration = (MigrationBase)ActivatorUtilities.CreateInstance(_services, MigrationTypes[nextMigrationId]);

            _logger.LogWarning($"Applying migration {migration.Id}.");

            try
            {
                using var measure = new MeasureContext();

                await migration.RunAsync(cancellationToken);

                _logger.LogInformation($"Successfully migrated {migration.Id} in {measure}.");
            }
            catch (Exception e)
            {
                _logger.LogError(e, $"Could not apply migration {migration.Id}.");

                // best-effort cleanup of indexes the failed migration created
                foreach (var index in migration.IndexesCreated)
                {
                    try
                    {
                        await _elastic.RequestAsync(c => c.Indices.DeleteAsync(index, null, cancellationToken));

                        _logger.LogInformation($"Deleted incomplete index '{index}'.");
                    }
                    catch (Exception ee)
                    {
                        _logger.LogWarning(ee, $"Could not delete incomplete index '{index}'.");
                    }
                }

                // stop at the first failure; later migrations may depend on this one
                break;
            }

            count++;
        }

        // NOTE(review): this is logged even when the loop broke on a failed migration;
        // `count` then reflects only the migrations that succeeded
        _logger.LogInformation($"All {count} migration(s) applied.");
    }
}