public void SupplementalData_AddsAndRetrievesMultipleTypesOfEntities()
{
    // Arrange: two entities of different concrete types that share the same id.
    var basicEntity = new BasicTestEntity(1, "foo");
    var otherEntity = new TestEntity(1, "foo");
    var supplementalData = new SupplementalData();

    // Act
    supplementalData.AddOrUpdate(basicEntity);
    supplementalData.AddOrUpdate(otherEntity);
    IReadOnlyList<IIdentifiable> allEntities = supplementalData.GetAll();

    // Assert: entities of distinct types are stored independently,
    // even when their ids collide.
    Assert.AreEqual(2, allEntities.Count);
}
public void SupplementalData_GetAllEntitiesByTypeReturnsExpectedResults()
{
    // Arrange: three entities of the same type with distinct ids.
    var supplementalData = new SupplementalData();
    var entities = new[]
    {
        new BasicTestEntity(1, "foo"),
        new BasicTestEntity(2, "bar"),
        new BasicTestEntity(3, "baz"),
    };

    // Act: add each entity in order.
    foreach (var entity in entities)
    {
        supplementalData.AddOrUpdate(entity);
    }

    // Assert: the typed GetAll overload returns every stored entity of that type.
    Assert.AreEqual(3, supplementalData.GetAll<BasicTestEntity>().Count);
}
public void SupplementalData_UpdatesEntityById()
{
    // Arrange: two entities sharing id 1, so the second add should replace the first.
    var original = new BasicTestEntity(1, "foo");
    var replacement = new BasicTestEntity(1, "bar");
    var supplementalData = new SupplementalData();

    // Act
    supplementalData.AddOrUpdate(original);
    supplementalData.AddOrUpdate(replacement);

    // Assert: only one entry remains, and it is the replacement instance
    // (reference equality, not just value equality).
    Assert.AreEqual(1, supplementalData.GetAll<BasicTestEntity>().Count);
    var retrieved = supplementalData.GetById<BasicTestEntity>(1);
    Assert.AreSame(replacement, retrieved);
}
public void SupplementalData_AddsAndRetrieveEntityById()
{
    // Arrange
    var stored = new BasicTestEntity(1);
    var supplementalData = new SupplementalData();

    // Act
    supplementalData.AddOrUpdate(stored);
    var retrieved = supplementalData.GetById<BasicTestEntity>(1);

    // Assert: lookup by id returns the exact instance that was added, not a copy.
    Assert.AreSame(stored, retrieved);
}
/// <summary>
/// Splits the set of entities to be created or updated into batches of
/// <c>MaxItemsPerBatch</c> (50), processes each batch sequentially through the
/// inner pipeline, and consolidates the per-batch results — items, error items,
/// and supplemental data — back into a single result set on the context. When
/// the item count does not exceed the batch size, the inner pipeline is invoked
/// once with no batching overhead.
/// </summary>
/// <typeparam name="T">The type of data entity.</typeparam>
/// <param name="context">The object of state through the pipeline.</param>
/// <param name="logger">The logging instance.</param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <returns>The completed asynchronous task.</returns>
public async Task ProcessAsync<T>(
    PipelineContext<T> context,
    ILogger logger,
    CancellationToken cancellationToken)
{
    var writeContext = (IWritableContext<T>)context;
    string httpMethod = context.MethodType.ToString();

    // Accumulators for the consolidated output across all batches.
    var allResults = new Results<T>();
    var allSupplementalData = new SupplementalData();
    int batchNumber = 0;

    if (writeContext.Items.Count() > MaxItemsPerBatch)
    {
        // Snapshot the full item set up front, because the loop below
        // overwrites writeContext.Items with each batch slice.
        List<T> allItems = writeContext.Items.ToList();
        int totalBatchCount = GetBatchCount(allItems.Count, MaxItemsPerBatch);

        foreach (IEnumerable<T> batch in allItems.MakeBatchesOfSize(MaxItemsPerBatch))
        {
            // Honor cancellation between batches (batches already sent are not rolled back).
            cancellationToken.ThrowIfCancellationRequested();

            T[] batchItems = batch.ToArray();
            ++batchNumber;

            logger?.LogInformation(
                context.LogContext.EventId,
                "{CorrelationId} {HttpMethod} Sending batch #{BatchNumber} of {TotalBatchCount} (batch size: {BatchSize}).",
                context.LogContext.CorrelationId,
                httpMethod,
                batchNumber,
                totalBatchCount,
                batchItems.Length);

            // Point the context at the current batch only; the inner pipeline
            // reads writeContext.Items as its input set.
            // NOTE(review): Items is left holding the final batch after the
            // loop completes — confirm no downstream consumer depends on the
            // original full item set still being present.
            writeContext.Items = batchItems;

            try
            {
                await InnerPipeline.ProcessAsync(context, logger, cancellationToken).ConfigureAwait(false);
            }
            catch (MultiStatusException<T> ex)
            {
                // The index values for error items will be in the range of 0-49
                // since the underlying pipeline handles single calls of max 50 items.
                // We need to offset the indexes to account for the previous batches
                // that have already been sent.
                // NOTE(review): the exception is swallowed here and surfaced only
                // through ErrorItems, while the unbatched path below lets a
                // MultiStatusException propagate to the caller — confirm this
                // asymmetry is intentional.
                int indexOffset = (batchNumber - 1) * MaxItemsPerBatch;
                allResults.ErrorItems.AddRange(
                    ex.FailureResults.Select(f =>
                    {
                        f.Index += indexOffset;
                        return (f);
                    }));
            }

            // Consolidate the results of each batch back into a single list.
            allResults.Items.AddRange(context.Results.Items);
            allSupplementalData.AddOrUpdate(
                context.ResultsMeta.SupplementalData.GetAll());
        }

        // Replace the last batch's results on the context with the
        // consolidated totals so the caller sees one unified result set.
        context.Results = allResults;
        context.ResultsMeta.SupplementalData = allSupplementalData;
    }
    else
    {
        // There are few enough items that batching isn't needed.
        // In this case, we can simply call the inner pipeline.
        await InnerPipeline.ProcessAsync(context, logger, cancellationToken).ConfigureAwait(false);
    }
}