/// <summary>
/// Commits valid entities to the underlying data source and updates the added, updated and removed counts.
/// </summary>
/// <param name="loadState">The load state holding the valid entities to commit.</param>
protected abstract void Commit(LoaderState<TLoadStateData> loadState);
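// Example (illustrative sketch only): one possible Commit override in a concrete loader.
// It assumes TLoadStateData is a List<PersonDto>, that LoaderState exposes Added/Updated
// counters, and that the subclass has a _repository field with Upsert/SaveChanges; none of
// these names are defined by this base class.
//
//     protected override void Commit(LoaderState<List<PersonDto>> loadState)
//     {
//         foreach (PersonDto dto in loadState.Valid)
//         {
//             // Upsert each valid row and record whether it was inserted or updated.
//             bool added = _repository.Upsert(dto);
//             if (added)
//             {
//                 loadState.Added++;
//             }
//             else
//             {
//                 loadState.Updated++;
//             }
//         }
//
//         _repository.SaveChanges();
//     }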
/// <summary>
/// Loads data from the incoming data stream, controls validation of incoming objects and calls
/// Commit after each batch of valid objects has been loaded.
/// </summary>
/// <returns>The load process result state.</returns>
public ILoaderState Process()
{
    if (Processor == null)
    {
        throw new InvalidOperationException("Mapper action has not been assigned.");
    }

    // Start time is assigned by the LoaderState constructor.
    var loaderState = new LoaderState<TLoadStateData>();
    CurrentLoaderState = loaderState;

    try
    {
        loaderState.CurrentRow = 0;

        // The rules used to validate incoming data.
        // ReSharper disable once SuggestVarOrType_Elsewhere
        ISpecification<TEntityDtoIn>[] rules = _validator.GetSpecifications().ToArray();

        // Initialize lookups only once.
        if (!_isInitialized)
        {
            Initialize?.Invoke();
            _isInitialized = true;
        }

        IEnumerable<TEntityDtoIn> dataSource = EntitiesGet;
        if (dataSource == null)
        {
            throw new InvalidOperationException("'EntitiesGet' is null.");
        }

        var currentBatch = 0;
        int maxBatchSize = MaxBatchSize ?? DefaultMaxBatchSize;
        if (maxBatchSize < 1)
        {
            Log.Warn(
                $"Fixing up max batch size as the supplied value is less than 1. Current value is {MaxBatchSize} which is being set to {DefaultMaxBatchSize}.");
            maxBatchSize = DefaultMaxBatchSize;
        }

        Log.Info($"Processing {typeof(TEntityDtoIn).Name} in batches of {maxBatchSize}.");

        // Split the stream into batches of at most maxBatchSize items.
        foreach (IEnumerable<TEntityDtoIn> batch in dataSource.BatchEx(maxBatchSize))
        {
            currentBatch++;

            // Record the number of batches to enhance testability.
            loaderState.Batches = currentBatch;
            Log.Info($"Starting to process batch {currentBatch}.");

            // Create a fresh valid-item state for each batch.
            loaderState.Valid = new TLoadStateData();

            foreach (TEntityDtoIn dto in batch)
            {
                loaderState.CurrentRow++;

                try
                {
                    // Always validate incoming data.
                    // ReSharper disable once SuggestVarOrType_Elsewhere
                    var errors = rules.ToErrors(dto).ToCollection();
                    if (!errors.Any())
                    {
                        // Map and load the valid state.
                        Processor(loaderState, dto);
                    }
                    else
                    {
                        Log.Error(new RuleException(
                            $"Can't process row {loaderState.CurrentRow} due to one or more validation errors. Please see the log for more details.",
                            errors));
                        loaderState.ErrorsCount++;
                    }
                }
                catch (RuleException rex)
                {
                    Log.Error(
                        $"Can't process row {loaderState.CurrentRow} due to one or more errors. Please see the log for more details.");
                    foreach (Error error in rex.Errors)
                    {
                        Log.Error(error.Message);
                    }

                    loaderState.ErrorsCount++;
                }
                catch (ApplicationException apex)
                {
                    Log.Error(apex, $"Can't process row {loaderState.CurrentRow} due to error: {apex.Message}");
                    loaderState.ErrorsCount++;
                }
                catch (Exception ex)
                {
                    Log.Error(ex, $"Can't process row {loaderState.CurrentRow} due to an unexpected error.");
                    loaderState.ErrorsCount++;
                }
            }

            Log.Info($"Batch {currentBatch} completed.");
            Log.Info("Committing changes.");

            // Update the target data source with the current batch.
            Commit(loaderState);
            Log.Info("Committed changes.");
        }
    }
    finally
    {
        loaderState.EndTime = DateTime.UtcNow;
        Log.Info(loaderState.ToString());
    }

    return loaderState;
}
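// Example (illustrative sketch only): driving a concrete loader. "PersonLoader" is a
// hypothetical subclass of this base class, and the sketch assumes the returned
// ILoaderState exposes the ErrorsCount and Batches counters used in Process() above.
//
//     var loader = new PersonLoader();
//     ILoaderState result = loader.Process();
//
//     if (result.ErrorsCount > 0)
//     {
//         Log.Warn($"Load completed with {result.ErrorsCount} rejected rows across {result.Batches} batches.");
//     }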
/// <summary>
/// Maps data from an incoming entity to an outgoing entity and ensures the validated outgoing entity is
/// added to the load state's valid item collection.
/// </summary>
/// <remarks>
/// By default an object mapping manager is used to map the incoming dto to an outgoing dto.
/// </remarks>
/// <param name="dtoIn">The source entity to map to the target entity.</param>
/// <param name="loadState">The state into which validated target entities are loaded.</param>
protected abstract void Process(TEntityDtoIn dtoIn, LoaderState<TLoadStateData> loadState);
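// Example (illustrative sketch only): a possible Process override that maps the incoming
// dto and stores the result in the batch's valid item collection. "PersonDtoIn",
// "PersonDto", the _mapper field and the Add call on loadState.Valid are assumptions
// about the concrete subclass, not members defined by this base class.
//
//     protected override void Process(PersonDtoIn dtoIn, LoaderState<List<PersonDto>> loadState)
//     {
//         // Map the validated incoming dto to the outgoing shape.
//         PersonDto dtoOut = _mapper.Map<PersonDtoIn, PersonDto>(dtoIn);
//
//         // Track it so Commit can persist the whole batch in one go.
//         loadState.Valid.Add(dtoOut);
//     }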