/// <summary>
/// Scans the catalog index for pages committed after the front cursor, adds each new
/// page's item count to <c>Total</c>, and advances the cursor past every counted page.
/// </summary>
/// <param name="client">HTTP client used to download the catalog index.</param>
/// <param name="front">Cursor marking the newest page already counted; saved after each page.</param>
/// <param name="back">Upper-bound cursor (not consulted by this implementation).</param>
/// <returns>Always <c>true</c>.</returns>
protected override async Task<bool> Fetch(CollectorHttpClient client, ReadWriteCursor front, ReadCursor back)
{
    await front.Load();
    DateTime frontDateTime = front.Value;

    JObject root = await client.GetJObjectAsync(Index);

    // NOTE: removed an unused local (List<Task<JObject>> tasks) that was never populated.
    foreach (JObject rootItem in root["items"])
    {
        DateTime pageTimeStamp = rootItem["commitTimeStamp"].ToObject<DateTime>();

        if (pageTimeStamp > frontDateTime)
        {
            int count = int.Parse(rootItem["count"].ToString());
            Total += count;

            // Persist progress per page so a restart resumes from the last counted page.
            front.Value = pageTimeStamp;
            await front.Save();
        }
    }

    return true;
}
/// <summary>
/// Creates a check source backed by the gallery database query service.
/// </summary>
/// <param name="cursor">Cursor passed through to the base check source.</param>
/// <param name="galleryDatabase">Query service used to look up package state; must not be null.</param>
public DatabasePackageStatusOutdatedCheckSource(
    ReadWriteCursor cursor,
    IGalleryDatabaseQueryService galleryDatabase)
    : base(cursor)
{
    if (galleryDatabase == null)
    {
        throw new ArgumentNullException(nameof(galleryDatabase));
    }

    _galleryDatabaseQueryService = galleryDatabase;
}
/// <summary>
/// Reads job configuration, builds the storage/queue/collector plumbing via the
/// collector factory, and captures the collector plus its front/back cursors in fields.
/// </summary>
/// <param name="arguments">Parsed command-line arguments for this job.</param>
/// <param name="cancellationToken">Not used by this initialization path.</param>
protected override void Init(IDictionary<string, string> arguments, CancellationToken cancellationToken)
{
    // Required settings — GetOrThrow fails fast when any of these is missing.
    var gallery = arguments.GetOrThrow<string>(Arguments.Gallery);
    var index = arguments.GetOrThrow<string>(Arguments.Index);
    var source = arguments.GetOrThrow<string>(Arguments.Source);
    var verbose = arguments.GetOrDefault(Arguments.Verbose, false);

    CommandHelpers.AssertAzureStorage(arguments);

    var monitoringStorageFactory = CommandHelpers.CreateStorageFactory(arguments, verbose);
    var endpointInputs = CommandHelpers.GetEndpointFactoryInputs(arguments);
    var messageHandlerFactory = CommandHelpers.GetHttpMessageHandlerFactory(TelemetryService, verbose);
    // NOTE(review): statusService is never referenced again in this method. The helper call
    // may have side effects (e.g. container creation), so it is left in place — confirm
    // whether it can be removed or should be passed to the collector factory.
    var statusService = CommandHelpers.GetPackageMonitoringStatusService(arguments, monitoringStorageFactory, LoggerFactory);
    var queue = CommandHelpers.CreateStorageQueue<PackageValidatorContext>(arguments, PackageValidatorContext.Version);

    Logger.LogInformation(
        "CONFIG gallery: {Gallery} index: {Index} storage: {Storage} endpoints: {Endpoints}",
        gallery,
        index,
        monitoringStorageFactory,
        string.Join(", ", endpointInputs.Select(e => e.Name)));

    // The factory returns the collector together with the cursors it should run between.
    var context = _collectorFactory.Create(
        queue,
        source,
        monitoringStorageFactory,
        endpointInputs,
        TelemetryService,
        messageHandlerFactory);

    _collector = context.Collector;
    _front = context.Front;
    _back = context.Back;
}
/// <summary>
/// Cancellation-aware variant: scans the catalog index for pages committed after the
/// front cursor, adds each new page's item count to <c>Total</c>, and advances the cursor.
/// </summary>
/// <param name="client">HTTP client used to download the catalog index.</param>
/// <param name="front">Cursor marking the newest page already counted; saved after each page.</param>
/// <param name="back">Upper-bound cursor (not consulted by this implementation).</param>
/// <param name="cancellationToken">Flowed into cursor load/save and the HTTP request.</param>
/// <returns>Always <c>true</c>.</returns>
protected override async Task<bool> Fetch(CollectorHttpClient client, ReadWriteCursor front, ReadCursor back, CancellationToken cancellationToken)
{
    await front.Load(cancellationToken);
    DateTime frontDateTime = front.Value;

    JObject root = await client.GetJObjectAsync(Index, cancellationToken);

    // NOTE: removed an unused local (List<Task<JObject>> tasks) that was never populated.
    foreach (JObject rootItem in root["items"])
    {
        DateTime pageTimeStamp = rootItem["commitTimeStamp"].ToObject<DateTime>();

        if (pageTimeStamp > frontDateTime)
        {
            int count = int.Parse(rootItem["count"].ToString());
            Total += count;

            // Persist progress per page so a restart resumes from the last counted page.
            front.Value = pageTimeStamp;
            await front.Save(cancellationToken);
        }
    }

    return true;
}
/// <summary>
/// Creates a check source backed by the auditing storage container.
/// </summary>
/// <param name="cursor">Cursor passed through to the base check source.</param>
/// <param name="auditingStorage">Storage holding audit records; must not be null.</param>
/// <param name="logger">Logger for this source; must not be null.</param>
public AuditingStoragePackageStatusOutdatedCheckSource(
    ReadWriteCursor cursor,
    CatalogStorage auditingStorage,
    ILogger<AuditingStoragePackageStatusOutdatedCheckSource> logger)
    : base(cursor)
{
    if (auditingStorage == null)
    {
        throw new ArgumentNullException(nameof(auditingStorage));
    }

    if (logger == null)
    {
        throw new ArgumentNullException(nameof(logger));
    }

    _auditingStorage = auditingStorage;
    _logger = logger;
}
/// <summary>
/// Wires the enqueuer to a validation collector and the cursor pair it runs between.
/// All three dependencies are required.
/// </summary>
public PackageValidatorContextEnqueuer(
    ValidationCollector collector,
    ReadWriteCursor front,
    ReadCursor back)
{
    if (collector == null)
    {
        throw new ArgumentNullException(nameof(collector));
    }

    if (front == null)
    {
        throw new ArgumentNullException(nameof(front));
    }

    if (back == null)
    {
        throw new ArgumentNullException(nameof(back));
    }

    _collector = collector;
    _front = front;
    _back = back;
}
/// <summary>
/// Reads job configuration and builds the registration collector, choosing the package
/// path provider and back cursor based on whether packages come from the flat container,
/// then sets up the durable front cursor and telemetry destination dimension.
/// </summary>
/// <param name="arguments">Parsed command-line arguments for this job.</param>
/// <param name="cancellationToken">Not used by this initialization path.</param>
protected override void Init(IDictionary<string, string> arguments, CancellationToken cancellationToken)
{
    var source = arguments.GetOrThrow<string>(Arguments.Source);
    var verbose = arguments.GetOrDefault(Arguments.Verbose, false);
    var contentBaseAddress = arguments.GetOrDefault<string>(Arguments.ContentBaseAddress);
    var galleryBaseAddress = arguments.GetOrDefault<string>(Arguments.GalleryBaseAddress);
    var isContentFlatContainer = arguments.GetOrDefault<bool>(Arguments.ContentIsFlatContainer);
    var allIconsInFlatContainer = arguments.GetOrDefault<bool>(Arguments.AllIconsInFlatContainer);
    var maxConcurrentBatches = MaxConcurrentBatches(arguments.GetOrDefault<int>(Arguments.MaxConcurrentBatches));

    // The term "legacy" here refers to the registration hives that do not contain any SemVer 2.0.0 packages.
    // In production, this is two registration hives:
    // 1) the first hive released, which is not gzipped and does not have SemVer 2.0.0 packages
    // 2) the secondary hive released, which is gzipped but does not have SemVer 2.0.0 packages
    var storageFactories = CommandHelpers.CreateRegistrationStorageFactories(arguments, verbose);

    Logger.LogInformation(
        "CONFIG source: \"{ConfigSource}\" storage: \"{Storage}\"",
        source,
        storageFactories.LegacyStorageFactory);

    if (isContentFlatContainer)
    {
        var flatContainerCursorUriString = arguments.GetOrThrow<string>(Arguments.CursorUri);
        var flatContainerName = arguments.GetOrThrow<string>(Arguments.FlatContainerName);
        RegistrationMakerCatalogItem.PackagePathProvider = new FlatContainerPackagePathProvider(flatContainerName);
        // In case that the flat container is used as the packages' source the registration needs to wait for the flatcontainer cursor
        _back = new HttpReadCursor(new Uri(flatContainerCursorUriString));
    }
    else
    {
        RegistrationMakerCatalogItem.PackagePathProvider = new PackagesFolderPackagePathProvider();
        // No upstream dependency to wait on — process up to "now".
        _back = MemoryCursor.CreateMax();
    }

    _collector = new RegistrationCollector(
        new Uri(source),
        storageFactories.LegacyStorageFactory,
        storageFactories.SemVer2StorageFactory,
        contentBaseAddress == null ? null : new Uri(contentBaseAddress),
        galleryBaseAddress == null ? null : new Uri(galleryBaseAddress),
        allIconsInFlatContainer,
        TelemetryService,
        Logger,
        CommandHelpers.GetHttpMessageHandlerFactory(TelemetryService, verbose),
        maxConcurrentBatches: maxConcurrentBatches);

    // The front cursor lives in the legacy hive's storage as cursor.json.
    var cursorStorage = storageFactories.LegacyStorageFactory.Create();
    _front = new DurableCursor(cursorStorage.ResolveUri("cursor.json"), cursorStorage, MemoryCursor.MinValue);
    // NOTE(review): result intentionally discarded — presumably called for its side effect
    // (e.g. ensuring the SemVer 2.0.0 container exists); confirm before removing.
    storageFactories.SemVer2StorageFactory?.Create();

    _destination = storageFactories.LegacyStorageFactory.DestinationAddress;
    TelemetryService.GlobalDimensions[TelemetryConstants.Destination] = _destination?.AbsoluteUri;
}
/// <summary>
/// Wires the advisory collector to its cursor, the service that queries advisories,
/// the ingestor that persists them, and a logger. All dependencies are required.
/// </summary>
public AdvisoryCollector(
    ReadWriteCursor<DateTimeOffset> cursor,
    IAdvisoryQueryService queryService,
    IAdvisoryIngestor ingestor,
    ILogger<AdvisoryCollector> logger)
{
    if (cursor == null)
    {
        throw new ArgumentNullException(nameof(cursor));
    }

    if (queryService == null)
    {
        throw new ArgumentNullException(nameof(queryService));
    }

    if (ingestor == null)
    {
        throw new ArgumentNullException(nameof(ingestor));
    }

    if (logger == null)
    {
        throw new ArgumentNullException(nameof(logger));
    }

    _cursor = cursor;
    _queryService = queryService;
    _ingestor = ingestor;
    _logger = logger;
}
/// <summary>
/// Converts a catalog page's items into batches, keeping only entries strictly after
/// the front cursor and no later than the back cursor, ordered by commit timestamp.
/// </summary>
/// <param name="front">Lower bound (exclusive) on commit timestamps.</param>
/// <param name="back">Upper bound (inclusive) on commit timestamps.</param>
/// <param name="page">Catalog page JSON whose "items" array is batched.</param>
/// <returns>Batches sorted by ascending commit timestamp.</returns>
private async Task<CatalogItemBatch[]> CreateBatchesAsync(ReadWriteCursor front, ReadCursor back, JObject page)
{
    // Cursor values are read inside the lambda so evaluation stays deferred,
    // exactly as in the original implementation.
    var eligibleItems = page["items"]
        .Select(token => new CatalogItem((JObject)token))
        .Where(catalogItem =>
            catalogItem.CommitTimeStamp > front.Value &&
            catalogItem.CommitTimeStamp <= back.Value);

    IEnumerable<CatalogItemBatch> batches = await CreateBatchesAsync(eligibleItems);

    CatalogItemBatch[] orderedBatches = batches
        .OrderBy(batch => batch.CommitTimeStamp)
        .ToArray();

    return orderedBatches;
}
/// <summary>
/// Reads job configuration and builds the monitoring/requeue dependencies: status
/// service, validator-context queue, the three cursors, the gallery database query
/// service, auditing storage, and the collector HTTP client.
/// </summary>
/// <param name="arguments">Parsed command-line arguments for this job.</param>
/// <param name="cancellationToken">Not used by this initialization path.</param>
protected override void Init(IDictionary<string, string> arguments, CancellationToken cancellationToken)
{
    var verbose = arguments.GetOrDefault(Arguments.Verbose, false);
    _maxRequeueQueueSize = arguments.GetOrDefault(Arguments.MaxRequeueQueueSize, DefaultMaxQueueSize);

    CommandHelpers.AssertAzureStorage(arguments);

    var monitoringStorageFactory = CommandHelpers.CreateStorageFactory(arguments, verbose);
    _statusService = CommandHelpers.GetPackageMonitoringStatusService(arguments, monitoringStorageFactory, LoggerFactory);
    _packageValidatorContextQueue = CommandHelpers.CreateStorageQueue<PackageValidatorContext>(arguments, PackageValidatorContext.Version);

    Logger.LogInformation(
        "CONFIG storage: {Storage}",
        monitoringStorageFactory);

    // Three cursors: overall monitoring front, plus separate gallery and deleted cursors.
    _monitoringCursor = ValidationFactory.GetFront(monitoringStorageFactory);
    _galleryCursor = CreateCursor(monitoringStorageFactory, GalleryCursorFileName);
    _deletedCursor = CreateCursor(monitoringStorageFactory, DeletedCursorFileName);

    // Gallery database access: connection factory, package-content URI builder,
    // and the SQL command timeout (defaults to 300 seconds).
    var connectionString = arguments.GetOrThrow<string>(Arguments.ConnectionString);
    var galleryDbConnection = new AzureSqlConnectionFactory(
        connectionString,
        SecretInjector,
        LoggerFactory.CreateLogger<AzureSqlConnectionFactory>());
    var packageContentUriBuilder = new PackageContentUriBuilder(
        arguments.GetOrThrow<string>(Arguments.PackageContentUrlFormat));
    var timeoutInSeconds = arguments.GetOrDefault(Arguments.SqlCommandTimeoutInSeconds, 300);
    _galleryDatabaseQueryService = new GalleryDatabaseQueryService(
        galleryDbConnection,
        packageContentUriBuilder,
        TelemetryService,
        timeoutInSeconds);

    // Auditing storage is throttled to the default connection limit.
    var auditingStorageFactory = CommandHelpers.CreateSuffixedStorageFactory(
        "Auditing",
        arguments,
        verbose,
        new SemaphoreSlimThrottle(new SemaphoreSlim(ServicePointManager.DefaultConnectionLimit)));
    _auditingStorage = auditingStorageFactory.Create();

    var messageHandlerFactory = CommandHelpers.GetHttpMessageHandlerFactory(TelemetryService, verbose);
    _client = new CollectorHttpClient(messageHandlerFactory());
}
/// <summary>
/// Reads job configuration and builds the DNX catalog collector, including the optional
/// alternate ("preferred") package source storage, the HTTP client timeout, and the
/// durable front cursor; records the destination in the global telemetry dimensions.
/// </summary>
/// <param name="arguments">Parsed command-line arguments for this job.</param>
/// <param name="cancellationToken">Not used by this initialization path.</param>
protected override void Init(IDictionary<string, string> arguments, CancellationToken cancellationToken)
{
    var source = arguments.GetOrThrow<string>(Arguments.Source);
    var verbose = arguments.GetOrDefault(Arguments.Verbose, false);
    var contentBaseAddress = arguments.GetOrDefault<string>(Arguments.ContentBaseAddress);
    var storageFactory = CommandHelpers.CreateStorageFactory(arguments, verbose);
    var httpClientTimeoutInSeconds = arguments.GetOrDefault<int?>(Arguments.HttpClientTimeoutInSeconds);
    // Null when not configured, letting the collector fall back to its own default.
    var httpClientTimeout = httpClientTimeoutInSeconds.HasValue
        ? (TimeSpan?)TimeSpan.FromSeconds(httpClientTimeoutInSeconds.Value)
        : null;

    StorageFactory preferredPackageSourceStorageFactory = null;
    IAzureStorage preferredPackageSourceStorage = null;

    var preferAlternatePackageSourceStorage = arguments.GetOrDefault(Arguments.PreferAlternatePackageSourceStorage, defaultValue: false);

    if (preferAlternatePackageSourceStorage)
    {
        preferredPackageSourceStorageFactory = CommandHelpers.CreateSuffixedStorageFactory("PreferredPackageSourceStorage", arguments, verbose);
        // NOTE(review): `as` cast may yield null if the factory does not produce an
        // IAzureStorage — presumably the collector tolerates that; confirm.
        preferredPackageSourceStorage = preferredPackageSourceStorageFactory.Create() as IAzureStorage;
    }

    Logger.LogInformation("CONFIG source: \"{ConfigSource}\" storage: \"{Storage}\" preferred package source storage: \"{PreferredPackageSourceStorage}\"",
        source,
        storageFactory,
        preferredPackageSourceStorageFactory);
    Logger.LogInformation("HTTP client timeout: {Timeout}", httpClientTimeout);

    MaxDegreeOfParallelism = 256;

    _collector = new DnxCatalogCollector(
        new Uri(source),
        storageFactory,
        preferredPackageSourceStorage,
        contentBaseAddress == null ? null : new Uri(contentBaseAddress),
        TelemetryService,
        Logger,
        MaxDegreeOfParallelism,
        httpClient => new CatalogClient(new SimpleHttpClient(httpClient, LoggerFactory.CreateLogger<SimpleHttpClient>()), LoggerFactory.CreateLogger<CatalogClient>()),
        CommandHelpers.GetHttpMessageHandlerFactory(TelemetryService, verbose),
        httpClientTimeout);

    // Front cursor is durable (cursor.json in storage); back cursor is unbounded.
    var storage = storageFactory.Create();
    _front = new DurableCursor(storage.ResolveUri("cursor.json"), storage, MemoryCursor.MinValue);
    _back = MemoryCursor.CreateMax();

    _destination = storageFactory.BaseAddress;
    TelemetryService.GlobalDimensions[TelemetryConstants.Destination] = _destination.AbsoluteUri;
}
/// <summary>
/// Delegates the fetch loop to the shared catalog commit-processing utility, supplying
/// this collector's fetch/batch/process callbacks, concurrency limit, and logger.
/// </summary>
/// <returns>The task returned by the commit-processing utility.</returns>
protected override Task<bool> FetchAsync(
    CollectorHttpClient client,
    ReadWriteCursor front,
    ReadCursor back,
    CancellationToken cancellationToken)
    => CatalogCommitUtilities.ProcessCatalogCommitsAsync(
        client,
        front,
        back,
        FetchCatalogCommitsAsync,
        CreateBatchesAsync,
        ProcessBatchAsync,
        _maxConcurrentBatches,
        _logger,
        cancellationToken);
// Summary:
//
// 1. Process one catalog page at a time.
// 2. Within a given catalog page, batch catalog commit entries by lower-cased package ID.
// 3. Process up to `n` batches in parallel. Note that the batches may span multiple catalog commits.
// 4. Cease processing new batches if a failure has been observed. This job will eventually retry
// batches on its next outermost job loop.
// 5. If a failure has been observed, wait for all existing tasks to complete. Avoid task cancellation
// as that could lead to the entirety of a package registration being in an inconsistent state.
// To be fair, a well-timed exception could have the same result, but registration updates have never
// been transactional. Actively cancelling tasks would make an inconsistent registration more likely.
// 6. Update the cursor if and only if all preceding commits and the current (oldest) commit have been
// fully and successfully processed.
protected override async Task<bool> FetchAsync(
    CollectorHttpClient client,
    ReadWriteCursor front,
    ReadCursor back,
    CancellationToken cancellationToken)
{
    IEnumerable<CatalogItem> catalogItems = await FetchCatalogItemsAsync(client, front, cancellationToken);

    var hasAnyBatchFailed = false;
    var hasAnyBatchBeenProcessed = false;

    foreach (CatalogItem catalogItem in catalogItems)
    {
        JObject page = await client.GetJObjectAsync(catalogItem.Uri, cancellationToken);
        JToken context = page["@context"];
        CatalogItemBatch[] batches = await CreateBatchesAsync(front, back, page);
        // SortedDictionary keys ascending by commit timestamp: the First() entry below
        // is always the oldest unfinished commit, which gates cursor advancement.
        SortedDictionary<DateTime, CommitBatchTasks> commitBatchTasksMap = CreateCommitBatchTasksMap(batches);

        var unprocessedBatches = new Queue<CatalogItemBatch>(batches);
        var processingBatches = new Queue<BatchTask>();

        CatalogItemBatch lastBatch = unprocessedBatches.LastOrDefault();
        var exceptions = new List<Exception>();

        // Prime the pipeline with up to the configured number of concurrent batches.
        EnqueueBatchesIfNoFailures(
            client,
            context,
            commitBatchTasksMap,
            unprocessedBatches,
            processingBatches,
            lastBatch,
            cancellationToken);

        while (processingBatches.Any())
        {
            // DefaultIfEmpty(CompletedTask) keeps WhenAny from throwing when every
            // queued batch task has already completed.
            var activeTasks = processingBatches.Where(batch => !batch.Task.IsCompleted)
                .Select(batch => batch.Task)
                .DefaultIfEmpty(CompletedTask);

            await Task.WhenAny(activeTasks);

            // Drain fully-processed commits in timestamp order, advancing the cursor
            // only past commits whose every batch ran to completion (invariant 6 above).
            while (!hasAnyBatchFailed && commitBatchTasksMap.Any())
            {
                var commitBatchTasks = commitBatchTasksMap.First().Value;
                var isCommitFullyProcessed = commitBatchTasks.BatchTasks.All(batch => batch.Task != null && batch.Task.IsCompleted);

                // If the oldest commit is still being processed, no newer commit may
                // advance the cursor either — stop scanning.
                if (!isCommitFullyProcessed)
                {
                    break;
                }

                var isCommitSuccessfullyProcessed = commitBatchTasks.BatchTasks.All(batch => batch.Task.Status == TaskStatus.RanToCompletion);

                if (isCommitSuccessfullyProcessed)
                {
                    var commitTimeStamp = commitBatchTasks.CommitTimeStamp;

                    front.Value = commitTimeStamp;
                    await front.SaveAsync(cancellationToken);
                    Trace.TraceInformation($"{nameof(RegistrationCollector)}.{nameof(FetchAsync)} {nameof(front)}.{nameof(front.Value)} saved since timestamp changed from previous: {{0}}", front);

                    DequeueBatchesWhileMatches(processingBatches, batch => batch.CommitTimeStamp == commitTimeStamp);

                    commitBatchTasksMap.Remove(commitTimeStamp);
                }
                else // Canceled or Failed
                {
                    // Record failures; new batches will no longer be enqueued, but
                    // in-flight tasks are allowed to finish (invariant 5 above).
                    hasAnyBatchFailed = true;

                    exceptions.AddRange(
                        commitBatchTasks.BatchTasks
                            .Select(batch => batch.Task)
                            .Where(task => (task.IsFaulted || task.IsCanceled) && task.Exception != null)
                            .Select(task => task.Exception));
                }
            }

            if (hasAnyBatchFailed)
            {
                // Remove completed batches so the outer loop terminates once the
                // remaining in-flight work drains.
                DequeueBatchesWhileMatches(processingBatches, batch => batch.Task.IsCompleted);
            }

            hasAnyBatchBeenProcessed = true;

            // No-op once a failure has been observed (invariant 4 above).
            EnqueueBatchesIfNoFailures(
                client,
                context,
                commitBatchTasksMap,
                unprocessedBatches,
                processingBatches,
                lastBatch,
                cancellationToken);
        }

        if (hasAnyBatchFailed)
        {
            var innerException = exceptions.Count == 1 ? exceptions.Single() : new AggregateException(exceptions);

            throw new BatchProcessingException(innerException);
        }
    }

    return (hasAnyBatchBeenProcessed);
}
/// <summary>
/// Creates a check source positioned at the supplied cursor.
/// </summary>
/// <param name="cursor">Cursor tracking this source's progress; must not be null.</param>
public PackageStatusOutdatedCheckSource(ReadWriteCursor cursor)
{
    if (cursor == null)
    {
        throw new ArgumentNullException(nameof(cursor));
    }

    _cursor = cursor;
}
/// <summary>
/// Bundles a validation collector with the front/back cursor pair it runs between.
/// Null-guards added for consistency with <see cref="PackageValidatorContextEnqueuer"/>,
/// which validates the identical trio of dependencies.
/// </summary>
/// <param name="collector">Collector to run; must not be null.</param>
/// <param name="front">Front (progress) cursor; must not be null.</param>
/// <param name="back">Back (upper-bound) cursor; must not be null.</param>
public Result(ValidationCollector collector, ReadWriteCursor front, ReadCursor back)
{
    Collector = collector ?? throw new ArgumentNullException(nameof(collector));
    Front = front ?? throw new ArgumentNullException(nameof(front));
    Back = back ?? throw new ArgumentNullException(nameof(back));
}