/// <summary>
/// Runs the catalog2dnx collector against the given catalog index, writing flat
/// container artifacts for this batch's packages.
/// </summary>
/// <param name="context">Per-batch context carrying global and process settings.</param>
/// <param name="packageContexts">Packages in the batch; used only to build the lock keys.</param>
/// <param name="catalogIndexUri">URI of the catalog index.json to collect from.</param>
private async Task ExecuteCatalog2DnxAsync(PerBatchContext context, IReadOnlyList<PerPackageContext> packageContexts, Uri catalogIndexUri)
{
    // Scope the service provider to the flat container storage location.
    var serviceProvider = GetServiceProvider(
        context,
        context.Global.FlatContainerContainerName,
        context.Process.FlatContainerStoragePath);

    // No preferred package source storage is used in this flow.
    IAzureStorage preferredPackageSourceStorage = null;

    var collector = new DnxCatalogCollector(
        catalogIndexUri,
        serviceProvider.GetRequiredService<StorageFactory>(),
        preferredPackageSourceStorage,
        context.Global.ContentBaseAddress,
        serviceProvider.GetRequiredService<ITelemetryService>(),
        serviceProvider.GetRequiredService<ILogger>(),
        ServicePointManager.DefaultConnectionLimit,
        () => serviceProvider.GetRequiredService<HttpMessageHandler>(),
        TimeSpan.FromMinutes(10));

    // Hold a lock on the lowercased package IDs so concurrent workers do not write
    // to the same flat container paths at the same time.
    var lowercasePackageIds = packageContexts.Select(x => x.PackageId.ToLowerInvariant());
    using (await _stringLocker.AcquireAsync(lowercasePackageIds, TimeSpan.FromMinutes(5)))
    {
        await collector.RunAsync(CancellationToken.None);
    }
}
/// <summary>
/// Runs the catalog2lucene collector against the given catalog index, writing the
/// search index into this batch's Lucene container.
/// </summary>
/// <param name="context">Per-batch context carrying the Lucene container name.</param>
/// <param name="catalogIndexUri">URI of the catalog index.json to collect from.</param>
private async Task ExecuteCatalog2LuceneAsync(PerBatchContext context, Uri catalogIndexUri)
{
    // The Lucene output targets the per-batch container; no storage path applies here.
    var serviceProvider = GetServiceProvider(
        context,
        context.LuceneContainerName,
        storagePath: null);

    using (var directory = serviceProvider.GetRequiredService<Lucene.Net.Store.Directory>())
    using (var indexWriter = Catalog2LuceneJob.CreateIndexWriter(directory))
    {
        // Commit once at the end rather than after every batch; no base address is needed.
        var commitPerBatch = false;
        string registrationBaseAddress = null;

        var collector = new SearchIndexFromCatalogCollector(
            catalogIndexUri,
            indexWriter,
            commitPerBatch,
            registrationBaseAddress,
            serviceProvider.GetRequiredService<ITelemetryService>(),
            serviceProvider.GetRequiredService<ILogger>(),
            () => serviceProvider.GetRequiredService<HttpMessageHandler>());

        await collector.RunAsync(CancellationToken.None);
    }
}
/// <summary>
/// Dequeues up to <paramref name="batchSize"/> unique package messages, processes them as one
/// batch, and removes the messages from the queue when the batch completes successfully.
/// </summary>
/// <param name="workerContext">Context identifying the worker pulling from the queue.</param>
/// <param name="batchSize">Maximum number of messages to gather into a single batch.</param>
/// <returns><c>true</c> if at least one package was found and a batch was processed
/// (regardless of whether processing succeeded); <c>false</c> if the queue yielded no work.</returns>
private async Task<bool> ProcessNextMessageAsync(PerWorkerContext workerContext, int batchSize)
{
    var batchContext = new PerBatchContext(workerContext, UniqueName.New("batch"));
    // Identities are compared case-insensitively so "Foo/1.0.0" and "foo/1.0.0" dedupe.
    var packageIdentities = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var messages = new List<StorageQueueMessage<PackageMessage>>();
    var packageContexts = new List<PerPackageContext>();
    StorageQueueMessage<PackageMessage> lastMessage;
    do
    {
        lastMessage = await _queue.GetNextAsync(CancellationToken.None);
        if (lastMessage != null)
        {
            var packageId = lastMessage.Contents.PackageId.Trim();
            // Normalize the version so equivalent version strings map to the same identity.
            var packageVersion = NuGetVersion.Parse(lastMessage.Contents.PackageVersion.Trim()).ToNormalizedString();
            var packageIdentity = $"{packageId}/{packageVersion}";

            // If this is a duplicate package, complete it and skip it.
            // (continue jumps to the while condition, so batching proceeds normally.)
            if (!packageIdentities.Add(packageIdentity))
            {
                await _queue.RemoveAsync(lastMessage, CancellationToken.None);
                continue;
            }

            messages.Add(lastMessage);
            packageContexts.Add(new PerPackageContext(batchContext, packageId, packageVersion));
        }
    }
    // Stop gathering when the batch is full, the queue is drained, or the last message has
    // been dequeued 10+ times — NOTE(review): presumably to keep a likely-poison message in
    // a small batch so it cannot repeatedly fail a full batch; confirm intent.
    while (messages.Count < batchSize && lastMessage != null && lastMessage.DequeueCount < 10);

    if (packageContexts.Count == 0)
    {
        return (false);
    }

    var complete = await ProcessPackagesAsync(batchContext, packageContexts);

    // Only remove messages when the whole batch completed; otherwise they become visible
    // again and will be retried.
    if (complete)
    {
        foreach (var message in messages)
        {
            try
            {
                await _queue.RemoveAsync(message, CancellationToken.None);
            }
            catch (StorageException ex) when (ex.RequestInformation?.HttpStatusCode == (int)HttpStatusCode.NotFound)
            {
                // Ignore this error. The message has already been removed.
            }
        }
    }

    return (true);
}
/// <summary>
/// Runs the full pipeline for one batch: feed2catalog, then catalog2dnx,
/// catalog2registration, and catalog2lucene.
/// </summary>
/// <param name="context">Per-batch context.</param>
/// <param name="packageContexts">Candidate packages for the batch.</param>
/// <returns><c>true</c> when the batch completed (including the trivial empty case).</returns>
public async Task<bool> ProcessAsync(PerBatchContext context, IReadOnlyList<PerPackageContext> packageContexts)
{
    // Only packages that actually exist need to flow through the pipeline.
    var existingPackages = await GetExistingPackagesAsync(packageContexts);
    if (!existingPackages.Any())
    {
        // Nothing to do; treat the batch as complete.
        return true;
    }

    var catalogIndexUri = await ExecuteFeedToCatalogAsync(context, existingPackages);

    // The downstream jobs all consume the catalog index produced above.
    await ExecuteCatalog2DnxAsync(context, existingPackages, catalogIndexUri);
    await ExecuteCatalog2RegistrationAsync(context, catalogIndexUri);
    await ExecuteCatalog2LuceneAsync(context, catalogIndexUri);

    return true;
}
/// <summary>
/// Runs the catalog2registration collector against the given catalog index, writing
/// legacy and SemVer 2.0.0 registration hives.
/// </summary>
/// <param name="context">Per-batch context carrying the registration container name.</param>
/// <param name="catalogIndexUri">URI of the catalog index.json to collect from.</param>
private async Task ExecuteCatalog2RegistrationAsync(PerBatchContext context, Uri catalogIndexUri)
{
    // Registration output has no single storage path; the factories below carry their own.
    var serviceProvider = GetServiceProvider(
        context,
        context.Global.RegistrationContainerName,
        storagePath: null);

    var storageFactories = serviceProvider.GetRequiredService<RegistrationStorageFactories>();

    var collector = new RegistrationCollector(
        catalogIndexUri,
        storageFactories.LegacyStorageFactory,
        storageFactories.SemVer2StorageFactory,
        context.Global.ContentBaseAddress,
        serviceProvider.GetRequiredService<ITelemetryService>(),
        () => serviceProvider.GetRequiredService<HttpMessageHandler>());

    await collector.RunAsync(CancellationToken.None);
}
/// <summary>
/// Deletes the artifacts produced for one batch, job by job: catalog2lucene,
/// catalog2registration, catalog2dnx, and feed2catalog.
/// </summary>
/// <param name="context">Per-batch context identifying the containers and paths to clean.</param>
/// <param name="packageContexts">Packages whose flat container blobs should be removed.</param>
public async Task CleanUpAsync(PerBatchContext context, IReadOnlyList<PerPackageContext> packageContexts)
{
    var blobClient = BlobStorageUtilities.GetBlobClient(context.Global);

    // catalog2lucene: drop the per-batch Lucene container plus its local cache directory.
    await CleanUpUtilities.DeleteContainer(blobClient, context.LuceneContainerName, _logger);

    var localLuceneCache = Path.Combine(
        CleanUpUtilities.GetLuceneCacheDirectory(),
        context.LuceneContainerName);
    if (Directory.Exists(localLuceneCache))
    {
        Directory.Delete(localLuceneCache, recursive: true);
    }

    // catalog2registration: remove this worker's registration blobs.
    await CleanUpUtilities.DeleteBlobsWithPrefix(
        blobClient,
        context.Global.RegistrationContainerName,
        context.Worker.Name,
        _logger);

    // catalog2dnx: remove each package's flat container blobs (paths are lowercased on write).
    foreach (var package in packageContexts)
    {
        var flatContainerPrefix = $"{context.Process.FlatContainerStoragePath}/{package.PackageId.ToLowerInvariant()}/{package.PackageVersion.ToLowerInvariant()}";
        await CleanUpUtilities.DeleteBlobsWithPrefix(
            blobClient,
            context.Global.FlatContainerContainerName,
            flatContainerPrefix,
            _logger);
    }

    // feed2catalog: remove this worker's catalog blobs.
    await CleanUpUtilities.DeleteBlobsWithPrefix(
        blobClient,
        context.Global.CatalogContainerName,
        context.Worker.Name,
        _logger);
}
/// <summary>
/// Creates the per-package context linking a package identity to its batch.
/// </summary>
/// <param name="batch">The batch this package belongs to.</param>
/// <param name="packageId">The package ID (original casing preserved).</param>
/// <param name="packageVersion">The normalized package version string.</param>
public PerPackageContext(PerBatchContext batch, string packageId, string packageVersion)
{
    PackageId = packageId;
    PackageVersion = packageVersion;
    Batch = batch;
}
/// <summary>
/// Builds a service provider configured for one storage location: the shared logging,
/// telemetry, and HTTP singletons plus transient storage factories (flat/catalog,
/// registration, and Lucene) all pointing at <paramref name="containerName"/>.
/// </summary>
/// <param name="containerName">Azure blob container every storage factory targets.</param>
/// <param name="storagePath">Path inside the container for the plain storage factory;
/// may be null when the caller's job does not use it.</param>
private IServiceProvider GetServiceProvider(
    PerBatchContext context,
    string containerName,
    string storagePath)
{
    var serviceCollection = new ServiceCollection();

    // Shared infrastructure owned by the enclosing class — registered as singletons so
    // every job in this batch reuses the same logger factory, telemetry client, and handler.
    serviceCollection.AddSingleton(_loggerFactory);
    serviceCollection.AddSingleton(_telemetryClient);
    serviceCollection.AddSingleton(_httpMessageHandler);
    serviceCollection.AddSingleton(x => new HttpClient(x.GetRequiredService<HttpMessageHandler>()));
    serviceCollection.AddTransient<ITelemetryService, TelemetryService>();
    serviceCollection.AddTransient(x => x.GetRequiredService<ILoggerFactory>().CreateLogger(typeof(Program)));

    // IStorageFactory/IStorage resolve through the concrete StorageFactory registered below.
    serviceCollection.AddTransient<IStorageFactory, StorageFactory>(x => x.GetRequiredService<StorageFactory>());
    serviceCollection.AddTransient<IStorage>(x => x.GetRequiredService<IStorageFactory>().Create());

    // Plain Azure storage factory used by feed2catalog and catalog2dnx.
    serviceCollection.AddTransient(x => CommandHelpers.CreateStorageFactory(new Dictionary<string, string>
    {
        { Arguments.StorageType, Arguments.AzureStorageType },
        { Arguments.StorageBaseAddress, $"{context.Global.StorageBaseAddress}/{containerName}/{storagePath}/" },
        { Arguments.StorageAccountName, context.Global.StorageAccountName },
        { Arguments.StorageContainer, containerName },
        { Arguments.StorageKeyValue, context.Global.StorageKeyValue },
        { Arguments.StoragePath, storagePath },
    }, verbose: true));

    // Registration storage factories: legacy, compressed, and SemVer 2.0.0 hives, each with
    // its own path under the same container/account.
    serviceCollection.AddTransient(x =>
    {
        return (CommandHelpers.CreateRegistrationStorageFactories(new Dictionary<string, string>
        {
            { Arguments.StorageType, Arguments.AzureStorageType },
            { Arguments.StorageBaseAddress, $"{context.Global.StorageBaseAddress}/{containerName}/{context.Worker.RegistrationLegacyStoragePath}/" },
            { Arguments.StorageAccountName, context.Global.StorageAccountName },
            { Arguments.StorageContainer, containerName },
            { Arguments.StorageKeyValue, context.Global.StorageKeyValue },
            { Arguments.StoragePath, context.Worker.RegistrationLegacyStoragePath },
            { Arguments.UseCompressedStorage, "true" },
            { Arguments.CompressedStorageBaseAddress, $"{context.Global.StorageBaseAddress}/{containerName}/{context.Worker.RegistrationCompressedStoragePath}/" },
            { Arguments.CompressedStorageAccountName, context.Global.StorageAccountName },
            { Arguments.CompressedStorageContainer, containerName },
            { Arguments.CompressedStorageKeyValue, context.Global.StorageKeyValue },
            { Arguments.CompressedStoragePath, context.Worker.RegistrationCompressedStoragePath },
            { Arguments.UseSemVer2Storage, "true" },
            { Arguments.SemVer2StorageBaseAddress, $"{context.Global.StorageBaseAddress}/{containerName}/{context.Worker.RegistrationSemVer2StoragePath}/" },
            { Arguments.SemVer2StorageAccountName, context.Global.StorageAccountName },
            { Arguments.SemVer2StorageContainer, containerName },
            { Arguments.SemVer2StorageKeyValue, context.Global.StorageKeyValue },
            { Arguments.SemVer2StoragePath, context.Worker.RegistrationSemVer2StoragePath },
        }, verbose: true));
    });

    // Lucene directory backed by Azure storage, used by catalog2lucene.
    serviceCollection.AddTransient(x => CommandHelpers.GetLuceneDirectory(new Dictionary<string, string>
    {
        { Arguments.LuceneDirectoryType, Arguments.AzureStorageType },
        { Arguments.LuceneStorageAccountName, context.Global.StorageAccountName },
        { Arguments.LuceneStorageContainer, containerName },
        { Arguments.LuceneStorageKeyValue, context.Global.StorageKeyValue },
    }));

    return (serviceCollection.BuildServiceProvider());
}
/// <summary>
/// Runs the feed2catalog step: writes a catalog containing the batch's packages and
/// returns the URI of the resulting catalog index.json.
/// </summary>
/// <param name="context">Per-batch context carrying the catalog container and path.</param>
/// <param name="packageContexts">Packages to include in the catalog.</param>
/// <returns>The resolved URI of the catalog's index.json.</returns>
private async Task<Uri> ExecuteFeedToCatalogAsync(PerBatchContext context, IReadOnlyList<PerPackageContext> packageContexts)
{
    var serviceProvider = GetServiceProvider(
        context,
        context.Global.CatalogContainerName,
        context.Worker.CatalogStoragePath);

    var now = DateTime.UtcNow;
    var secondsOffset = 0;
    var packages = new SortedList<DateTime, IList<FeedPackageDetails>>();

    foreach (var packageContext in packageContexts)
    {
        // These timestamps don't matter too much since the order that items are processed
        // within a catalog commit is not defined. Decrementing the offset is just a
        // convenient way to get a bunch of unique timestamps to ease debugging.
        var commitKey = now.AddSeconds(secondsOffset--);
        var published = now.AddSeconds(secondsOffset--);
        var lastEdited = now.AddSeconds(secondsOffset--);
        var created = now.AddSeconds(secondsOffset--);

        var details = new FeedPackageDetails(
            packageContext.PackageUri,
            created,
            lastEdited,
            published,
            packageContext.PackageId,
            packageContext.PackageVersion);

        packages.Add(commitKey, new List<FeedPackageDetails> { details });
    }

    var storage = serviceProvider.GetRequiredService<IStorage>();

    // Treat all packages as newly created; do not rewrite created timestamps from edits.
    var createdPackages = true;
    var updateCreatedFromEdited = false;

    using (var httpClient = serviceProvider.GetRequiredService<HttpClient>())
    {
        var telemetryService = serviceProvider.GetRequiredService<ITelemetryService>();
        var logger = serviceProvider.GetRequiredService<ILogger>();
        var packageCatalogItemCreator = PackageCatalogItemCreator.Create(
            httpClient,
            telemetryService,
            logger,
            storage: null);

        await FeedHelpers.DownloadMetadata2CatalogAsync(
            packageCatalogItemCreator,
            packages,
            storage,
            now,
            now,
            now,
            ServicePointManager.DefaultConnectionLimit,
            createdPackages,
            updateCreatedFromEdited,
            CancellationToken.None,
            telemetryService,
            logger);
    }

    return storage.ResolveUri("index.json");
}
/// <summary>
/// Processes one batch of packages, logs the outcome per package (or per batch on
/// failure), then runs best-effort clean-up. Exceptions from either phase are caught
/// and logged; the method never throws.
/// </summary>
/// <param name="batchContext">Context identifying the worker and process.</param>
/// <param name="packageContexts">Packages in the batch.</param>
/// <returns><c>true</c> if the batch processor reported completion; otherwise <c>false</c>.</returns>
private async Task<bool> ProcessPackagesAsync(PerBatchContext batchContext, List<PerPackageContext> packageContexts)
{
    // Snapshot the package identities up front so failure logs are independent of later state.
    var loggablePackages = packageContexts
        .Select(x => new { Id = x.PackageId, Version = x.PackageVersion })
        .ToList();

    bool complete;
    try
    {
        complete = await _perBatchProcessor.ProcessAsync(batchContext, packageContexts);

        if (complete)
        {
            foreach (var package in packageContexts)
            {
                _logger.LogInformation(
                    "Package {PackageId}/{PackageVersion} was completed by worker {WorkerName}, process {ProcessName}.",
                    package.PackageId,
                    package.PackageVersion,
                    batchContext.Worker.Name,
                    batchContext.Process.Name);
            }
        }
        else
        {
            _logger.LogWarning(
                "Packages {Packages} were not completed by worker {WorkerName}, process {ProcessName}.",
                loggablePackages,
                batchContext.Worker.Name,
                batchContext.Process.Name);
        }
    }
    catch (Exception ex)
    {
        _logger.LogError(
            0,
            ex,
            "An exception was thrown while processing packages {Packages}, worker {WorkerName}, process {ProcessName}.",
            loggablePackages,
            batchContext.Worker.Name,
            batchContext.Process.Name);
        complete = false;
    }

    // Clean-up is best effort and must never mask the processing outcome.
    try
    {
        await _perBatchProcessor.CleanUpAsync(batchContext, packageContexts);
    }
    catch (Exception ex)
    {
        _logger.LogError(
            0,
            ex,
            "An exception was thrown while cleaning up packages {Packages}, worker {WorkerName}, process {ProcessName}.",
            loggablePackages,
            batchContext.Worker.Name,
            batchContext.Process.Name);
    }

    return complete;
}