/// <summary>
/// Imports the NuGet catalog from the beginning of time, reporting the run as a
/// Sentry transaction ("import-catalog").
/// </summary>
/// <param name="token">Job cancellation token; its <c>ShutdownToken</c> is flowed into catalog processing.</param>
public async Task Import(IJobCancellationToken token)
{
    // Isolate this run's Sentry state in its own scope.
    using var _ = _hub.PushScope();
    var transaction = _hub.StartTransaction("import-catalog", "Import nuget catalog");

    var logger = _loggerFactory.CreateLogger<NuGetCatalogImporter>();
    logger.LogInformation("Starting importing catalog.");

    try
    {
        var httpClient = _httpClientFactory.CreateClient("nuget");
        var catalogClient = new CatalogClient(httpClient, _loggerFactory.CreateLogger<CatalogClient>());

        var settings = new CatalogProcessorSettings
        {
            DefaultMinCommitTimestamp = DateTimeOffset.MinValue, // Read everything
            ExcludeRedundantLeaves = false,
        };

        var catalogProcessor = new CatalogProcessor(
            _cursorStore,
            catalogClient,
            _catalogLeafProcessor,
            settings,
            _loggerFactory.CreateLogger<CatalogProcessor>());

        await catalogProcessor.ProcessAsync(token.ShutdownToken);

        logger.LogInformation("Finished importing catalog.");
        transaction.Finish(SpanStatus.Ok);
    }
    catch (Exception e)
    {
        // BUG FIX: previously an exception left the transaction unfinished, so the
        // failed trace was never reported. Finish it with the exception and rethrow.
        transaction.Finish(e);
        throw;
    }
}
/// <summary>
/// Timer-triggered function that walks the NuGet catalog for the current schedule
/// window and enqueues an indexing operation for every added or deleted package leaf.
/// </summary>
public static async Task Run(
    [TimerTrigger("* */1 * * * *", RunOnStartup = true)] TimerInfo timer,
    [Queue(Constants.IndexingQueue, Connection = Constants.IndexingQueueConnection)] ICollector<PackageOperation> queueCollector,
    ILogger logger)
{
    // NOTE(review): a six-field cron of "* */1 * * * *" fires every second; if a
    // once-per-minute schedule was intended the seconds field should be "0" — confirm.
    var windowStart = timer.ScheduleStatus?.Last ?? DateTimeOffset.UtcNow;
    var windowEnd = timer.ScheduleStatus?.Next ?? DateTimeOffset.UtcNow;

    var leafProcessor = new DelegatingCatalogLeafProcessor(
        leaf =>
        {
            // Added/updated package: enqueue an "add" with the normalized version and
            // the lower-cased nuget.org package URL.
            var version = leaf.ParsePackageVersion();
            var normalized = version.ToNormalizedString();
            queueCollector.Add(PackageOperation.ForAdd(
                leaf.PackageId,
                leaf.PackageVersion,
                leaf.VerbatimVersion,
                normalized,
                leaf.Published,
                string.Format(Constants.NuGetPackageUrlTemplate, leaf.PackageId, normalized).ToLowerInvariant(),
                leaf.IsListed()));
            return Task.FromResult(true);
        },
        leaf =>
        {
            // Deleted package: enqueue a "delete" keyed by the normalized version.
            queueCollector.Add(PackageOperation.ForDelete(
                leaf.PackageId,
                leaf.PackageVersion,
                leaf.ParsePackageVersion().ToNormalizedString()));
            return Task.FromResult(true);
        });

    var settings = new CatalogProcessorSettings
    {
        MinCommitTimestamp = windowStart,
        MaxCommitTimestamp = windowEnd,
        ServiceIndexUrl = "https://api.nuget.org/v3/index.json"
    };

    var processor = new CatalogProcessor(
        new InMemoryCursor(windowStart),
        new CatalogClient(HttpClient, new NullLogger<CatalogClient>()),
        leafProcessor,
        settings,
        new NullLogger<CatalogProcessor>());

    await processor.ProcessAsync(CancellationToken.None);
}
/// <summary>
/// Imports the full NuGet catalog, wrapping the run in a Sentry transaction
/// ("import-catalog" / "catalog.import") that is finished with either a success
/// status or the thrown exception.
/// </summary>
/// <param name="token">Job cancellation token; its <c>ShutdownToken</c> is passed to the processor.</param>
public async Task Import(IJobCancellationToken token)
{
    // A dedicated scope keeps this run's transaction and breadcrumbs from
    // leaking into other job executions.
    using var _ = _hub.PushScope();
    var transaction = _hub.StartTransaction("import-catalog", "catalog.import");
    _hub.ConfigureScope(scope => scope.Transaction = transaction);

    var log = _loggerFactory.CreateLogger<NuGetCatalogImporter>();
    log.LogInformation("Starting importing catalog.");

    try
    {
        var catalogClient = new CatalogClient(
            _httpClientFactory.CreateClient("nuget"),
            _loggerFactory.CreateLogger<CatalogClient>());

        // Start from the beginning of time and keep redundant leaves as well.
        var processorSettings = new CatalogProcessorSettings
        {
            DefaultMinCommitTimestamp = DateTimeOffset.MinValue, // Read everything
            ExcludeRedundantLeaves = false,
        };

        var processor = new CatalogProcessor(
            _cursorStore,
            catalogClient,
            _catalogLeafProcessor,
            processorSettings,
            _loggerFactory.CreateLogger<CatalogProcessor>());

        await processor.ProcessAsync(token.ShutdownToken);

        log.LogInformation("Finished importing catalog.");
        transaction.Finish(SpanStatus.Ok);
    }
    catch (Exception e)
    {
        // Mark the trace as failed and report the exception before rethrowing.
        transaction.Finish(e);
        SentrySdk.CaptureException(e);
        throw;
    }
    finally
    {
        // Give Sentry a bounded window to ship buffered events before the job exits.
        await SentrySdk.FlushAsync(TimeSpan.FromSeconds(2));
    }
}
/// <summary>
/// Measures package lag: discovers search endpoints per region, finds the newest
/// commit timestamp across them, then processes catalog leaves newer than that
/// commit, retrying up to MAX_CATALOG_RETRY_COUNT times on failure.
/// </summary>
public async override Task Run()
{
    var token = new CancellationToken();
    try
    {
        // Collect every search-service instance across all configured regions.
        var regionInformations = _configuration.RegionInformations;
        var instances = new List<Instance>();
        foreach (var regionInformation in regionInformations)
        {
            instances.AddRange(await _searchServiceClient.GetSearchEndpointsAsync(regionInformation, token));
        }

        // The newest commit across all instances is the starting point; per-instance
        // failures are logged and skipped so one bad endpoint doesn't abort the run.
        var maxCommit = DateTimeOffset.MinValue;
        foreach (Instance instance in instances)
        {
            try
            {
                var commitDateTime = await _searchServiceClient.GetCommitDateTimeAsync(instance, token);
                maxCommit = commitDateTime > maxCommit ? commitDateTime : maxCommit;
            }
            catch (Exception e)
            {
                // BUG FIX: use the LogError(Exception, message) overload so the
                // exception and stack trace are captured structurally, instead of
                // formatting the exception into the message string.
                Logger.LogError(e, "An exception was encountered so no HTTP response was returned.");
            }
        }

        if (maxCommit == DateTimeOffset.MinValue)
        {
            Logger.LogError("Failed to retrieve a proper starting commit. Abandoning the current run.");
            return;
        }

        var catalogLeafProcessor = new PackageLagCatalogLeafProcessor(
            instances,
            _searchServiceClient,
            _telemetryService,
            LoggerFactory.CreateLogger<PackageLagCatalogLeafProcessor>());

        // Only apply configured retry/wait overrides when they are meaningful (> 0).
        if (_configuration.RetryLimit > 0)
        {
            catalogLeafProcessor.RetryLimit = _configuration.RetryLimit;
        }
        if (_configuration.WaitBetweenRetrySeconds > 0)
        {
            catalogLeafProcessor.WaitBetweenPolls = TimeSpan.FromSeconds(_configuration.WaitBetweenRetrySeconds);
        }

        var settings = new CatalogProcessorSettings
        {
            ServiceIndexUrl = _configuration.ServiceIndexUrl,
            DefaultMinCommitTimestamp = maxCommit,
            ExcludeRedundantLeaves = false
        };

        // Seed the cursor just past the observed commit so already-seen leaves are skipped.
        var start = new FileCursor("cursor.json", LoggerFactory.CreateLogger<FileCursor>());
        await start.SetAsync(maxCommit.AddTicks(1));

        var catalogProcessor = new CatalogProcessor(
            start,
            _catalogClient,
            catalogLeafProcessor,
            settings,
            LoggerFactory.CreateLogger<CatalogProcessor>());

        bool success;
        int retryCount = 0;
        do
        {
            success = await catalogProcessor.ProcessAsync();
            if (!success || !await catalogLeafProcessor.WaitForProcessing())
            {
                retryCount++;
                // BUG FIX: this message previously contained a raw newline inside a
                // regular string literal, which is a C# compile error.
                Logger.LogError("Processing the catalog leafs failed. Retry Count {CatalogProcessRetryCount}", retryCount);
            }
            // NOTE(review): when ProcessAsync succeeds but WaitForProcessing fails, the
            // loop still exits because only `success` is tested below — confirm intended.
        } while (!success && retryCount < MAX_CATALOG_RETRY_COUNT);

        return;
    }
    catch (Exception e)
    {
        // BUG FIX: log via the exception-first overload rather than a {Exception}
        // placeholder, preserving structured exception data.
        Logger.LogError(e, "Exception Occured.");
        return;
    }
}