/// <summary>
/// Validates all products in parallel chunks and writes the validation outcome
/// (failed flag + message) back for each product, returning aggregate counters.
/// </summary>
/// <param name="chunkSize">Number of product ids processed per parallel chunk.</param>
/// <param name="maxDegreeOfParallelism">Upper bound on concurrent chunk workers; also used as the cancellation polling interval.</param>
/// <param name="context">Execution context used for cancellation and progress reporting.</param>
/// <returns>A <see cref="ValidationReport"/> with totals; empty when the validation fields are not configured.</returns>
/// <exception cref="InvalidOperationException">Thrown when the current user id cannot be resolved.</exception>
public ValidationReport ValidateAndUpdate(int chunkSize, int maxDegreeOfParallelism, ITaskExecutionContext context)
{
    var report = new ValidationReport();

    // Nothing to do unless both target fields are configured (guard clause
    // replaces the original nested if around the whole body).
    if (string.IsNullOrEmpty(ValidationFailedField) || string.IsNullOrEmpty(ValidationMessageField))
    {
        return report;
    }

    var productIds = GetProductIds();
    int userId = _userProvider.GetUserId();
    if (userId == 0)
    {
        // Specific exception type instead of bare Exception so callers can catch meaningfully.
        throw new InvalidOperationException("userId is not defined");
    }

    // The original code incremented report properties (++ / +=) directly from
    // multiple Parallel.ForEach workers — a data race that loses updates.
    // Accumulate into local fields with Interlocked and publish once at the end.
    int invalidCount = 0;
    int errorCount = 0;
    int processedCount = 0;
    int validatedCount = 0;
    int updatedCount = 0;

    Parallel.ForEach(
        productIds.Section(chunkSize),
        new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
        (chunk, state) =>
        {
            // Per-chunk error map; keys are product ids, values are messages.
            var errors = new ConcurrentDictionary<int, string>();
            int n = 0;
            foreach (var productId in chunk)
            {
                if (state.IsStopped)
                {
                    return;
                }

                // Poll cancellation periodically (every maxDegreeOfParallelism items)
                // rather than on every product, matching the original behavior.
                if (n % maxDegreeOfParallelism == 0 && context.IsCancellationRequested)
                {
                    context.IsCancelled = true;
                    state.Stop();
                    return;
                }
                n++;

                try
                {
                    var validation = _articleService.XamlValidationById(productId, true);
                    var validationResult = ActionTaskResult.FromRulesException(validation, productId);
                    if (!validationResult.IsSuccess)
                    {
                        errors.TryAdd(productId, validationResult.ToString());
                        Interlocked.Increment(ref invalidCount);
                    }
                }
                catch (Exception ex)
                {
                    // Validation threw: record the failure but keep processing the chunk.
                    errors.TryAdd(productId, ex.Message);
                    Interlocked.Increment(ref errorCount);
                    _logger.ErrorException($"Error while validating product {productId}", ex);
                }

                // Atomic increment gives a consistent processed-count for progress.
                byte progress = (byte)(Interlocked.Increment(ref processedCount) * 100 / productIds.Length);
                context.SetProgress(progress);
            }

            var updateResult = UpdateValidationInfo(chunk.ToArray(), errors, ValidationFailedField, ValidationMessageField, userId);
            Interlocked.Add(ref validatedCount, updateResult.ProductsCount);
            Interlocked.Add(ref updatedCount, updateResult.UpdatedProuctsCount);
        });

    // Publish thread-safe totals into the report after all workers have finished.
    report.InvalidProductsCount = invalidCount;
    report.ValidationErrorsCount = errorCount;
    report.TotalProductsCount = processedCount;
    report.ValidatedProductsCount += validatedCount;
    report.UpdatedProductsCount += updatedCount;
    return report;
}
/// <summary>
/// Imports all products for the given language/state into the supplied stores,
/// fetching and bulk-creating them chunk by chunk, reporting progress and
/// honoring cancellation between chunks.
/// </summary>
/// <param name="executionContext">Execution context used for cancellation, progress, and result reporting.</param>
/// <param name="language">Language the reindex URL is built for.</param>
/// <param name="state">State the reindex URL is built for.</param>
/// <param name="stores">Target product stores keyed by name — assumed keyed by store id; confirm against caller.</param>
public async Task ImportAsync(ITaskExecutionContext executionContext, string language, string state, Dictionary<string, IProductStore> stores)
{
    if (executionContext.IsCancellationRequested)
    {
        executionContext.IsCancelled = true;
        return;
    }

    var url = _configuration.GetReindexUrl(language, state);
    _logger.LogInformation("Starting import...");
    var ids = await GetIds(url);

    // Structured-logging templates (named placeholders) instead of interpolated
    // strings: interpolation defeats log indexing and triggers CA2254.
    _logger.LogInformation("Product list received. Length: {Length}. Splitting products into chunks by {ChunkSize}...", ids.Length, _options.ChunkSize);

    var chunks = ids.Chunk(_options.ChunkSize).ToArray();
    var index = 1;
    float progress = 0;
    foreach (var chunk in chunks)
    {
        // Re-check cancellation before each chunk so a long import can be aborted.
        if (executionContext.IsCancellationRequested)
        {
            executionContext.IsCancelled = true;
            return;
        }

        var chunkIds = chunk as int[] ?? chunk.ToArray();
        _logger.LogInformation("Chunk {Index} with ids ({Ids}) requested...", index, string.Join(",", chunkIds));

        // Fetch every product of the chunk concurrently.
        var dataTasks = chunkIds.Select(n => GetProductById(url, n));
        ProductPostProcessorData[] data;
        try
        {
            data = await Task.WhenAll(dataTasks);
        }
        catch (Exception ex)
        {
            string message = $"An error occurs while receiving products for chunk {index}";
            _logger.LogError(ex, message);
            executionContext.Result = ActionTaskResult.Error(message);
            throw; // rethrow without resetting the stack trace
        }

        _logger.LogInformation("Products from chunk {Index} received. Starting bulk import...", index);
        var result = await _manager.BulkCreateAsync(data, language, state, stores);

        // Guard clause on failure replaces the original success/else inversion.
        if (!result.Succeeded)
        {
            string message = $"Cannot proceed bulk import for chunk {index}: {result}";
            _logger.LogError(message);
            executionContext.Result = ActionTaskResult.Error(message);
            throw result.GetException();
        }

        _logger.LogInformation("Bulk import for chunk {Index} succeeded.", index);
        index++;

        // Accumulate in float so rounding error does not stall the byte progress value.
        progress += (float)100 / chunks.Length;
        executionContext.SetProgress((byte)progress);
    }

    executionContext.Result = ActionTaskResult.Success("Import completed");
}