/// <summary>
/// Deletes all <paramref name="documentsToWorkWith"/> from <paramref name="container"/> concurrently
/// via the bulk-operations helper, then prints a summary (timing, RU charge, success/failure counts)
/// to the console.
/// </summary>
/// <param name="container">Cosmos DB container the items are deleted from.</param>
/// <param name="documentsToWorkWith">Items to delete; each supplies its own id and partition key.</param>
public static async Task DeleteItemsConcurrentlyAsync(Container container, IReadOnlyList<MyItem> documentsToWorkWith)
{
    // <BulkDelete>
    BulkOperations<MyItem> bulkOperations = new BulkOperations<MyItem>(documentsToWorkWith.Count);
    foreach (MyItem document in documentsToWorkWith)
    {
        document.operationCounter++;
        bulkOperations.Tasks.Add(container.DeleteItemAsync<MyItem>(document.id, new PartitionKey(document.pk)).CaptureOperationResponse(document));
    }
    // </BulkDelete>

    BulkOperationResponse<MyItem> bulkOperationResponse = await bulkOperations.ExecuteAsync();

    // BUG FIX: the summary previously said "Bulk update operation"; this method performs deletes.
    Console.WriteLine($"Bulk delete operation finished in {bulkOperationResponse.TotalTimeTaken}");
    Console.WriteLine($"Consumed {bulkOperationResponse.TotalRequestUnitsConsumed} RUs in total");
    Console.WriteLine($"Deleted {bulkOperationResponse.SuccessfulDocuments} documents");
    Console.WriteLine($"Failed {bulkOperationResponse.Failures.Count} documents");

    if (bulkOperationResponse.Failures.Count > 0)
    {
        Console.WriteLine($"First failed sample document {bulkOperationResponse.Failures[0].Item1.id} - {bulkOperationResponse.Failures[0].Item2}");
    }
}
/// <summary>
/// Writes a one-line informational summary of a completed bulk batch to
/// <paramref name="logger"/>: total elapsed time, total RU charge, and the
/// success/failure counts.
/// </summary>
/// <typeparam name="T">Item type carried by the bulk operation response.</typeparam>
public static void LogBulkOperationResponse<T>(this ILogger logger, BulkOperationResponse<T> response)
{
    int successCount = response.Successes.Count;
    int failureCount = response.Failures.Count;

    logger.LogInformation(
        "BulkOperationResponse - TotalTimeTaken: {TotalTimeTaken}, TotalRUs: {TotalRUs}, SuccessCount: {SuccessfulDocuments}, FailureCount: {Failures}",
        response.TotalTimeTaken,
        response.TotalRequestUnitsConsumed,
        successCount,
        failureCount);
}
/// <summary>
/// Submits every operation in <paramref name="operations"/> one at a time and
/// collects the individual responses in submission order.
/// </summary>
/// <param name="callingUrl">URL of the caller, forwarded to each operation.</param>
/// <param name="sessionId">Session identifier, forwarded to each operation.</param>
/// <param name="operations">The set of bulk operations to submit.</param>
/// <returns>One response per submitted operation, in the same order.</returns>
public List<BulkOperationResponse> SubmitBulkOperations(string callingUrl, Guid sessionId, BulkOperations operations)
{
    List<BulkOperationResponse> responses = new List<BulkOperationResponse>();

    foreach (IBulkOperation pendingOperation in operations.Operations)
    {
        responses.Add(pendingOperation.SubmitOperation(this, callingUrl, sessionId));
    }

    return responses;
}
/// <summary>
/// Change feed handler: optionally remaps each changed document's partition key
/// (when both a source and a target partition-key mapping are configured) and writes
/// the batch to the destination container in bulk. Failures are dead-lettered to
/// blob storage when a deadletter client is configured; metrics are always logged.
/// </summary>
/// <param name="docs">Batch of changed documents delivered by the change feed processor.</param>
/// <param name="cancellationToken">Token forwarded to each per-item write.</param>
private async Task ProcessChangesAsync(IReadOnlyCollection<DocumentMetadata> docs, CancellationToken cancellationToken)
{
    try
    {
        // BUG FIX: the original called SourcePartitionKeys.Contains(...) unconditionally and
        // would throw NullReferenceException when SourcePartitionKeys is null, even though the
        // mapping decision below explicitly tolerates null. '?.' makes both flags false in that case.
        bool isSyntheticKey = this.SourcePartitionKeys?.Contains(",") == true;
        bool isNestedAttribute = this.SourcePartitionKeys?.Contains("/") == true;

        Container targetContainer = this.destinationCollectionClient.GetContainer(this.config.DestDbName, this.config.DestCollectionName);
        this.containerToStoreDocuments = targetContainer;

        // Loop-invariant: remap only when both sides of the mapping are configured.
        // BUG FIX: the original used the non-short-circuiting '&' instead of '&&', and
        // a redundant nested assignment ': document = doc' in the ternary's else branch.
        bool mapPartitionKey = this.SourcePartitionKeys != null && this.TargetPartitionKey != null;

        BulkOperations<DocumentMetadata> bulkOperations = new BulkOperations<DocumentMetadata>(docs.Count);
        foreach (DocumentMetadata doc in docs)
        {
            DocumentMetadata document = mapPartitionKey
                ? MapPartitionKey(doc, isSyntheticKey, this.TargetPartitionKey, isNestedAttribute, this.SourcePartitionKeys)
                : doc;

            if (this.config.OnlyInsertMissingItems)
            {
                // Insert-only mode: conflicts (item already exists) are expected and ignored.
                bulkOperations.Tasks.Add(this.containerToStoreDocuments.CreateItemAsync(
                    item: document,
                    cancellationToken: cancellationToken).CaptureOperationResponse(document, ignoreConflicts: true));
            }
            else
            {
                bulkOperations.Tasks.Add(this.containerToStoreDocuments.UpsertItemAsync(
                    item: document,
                    cancellationToken: cancellationToken).CaptureOperationResponse(document, ignoreConflicts: true));
            }
        }

        BulkOperationResponse<DocumentMetadata> bulkOperationResponse = await bulkOperations.ExecuteAsync().ConfigureAwait(false);

        if (bulkOperationResponse.Failures.Count > 0 && this.deadletterClient != null)
        {
            await this.WriteFailedDocsToBlob("FailedImportDocs", this.deadletterClient, bulkOperationResponse)
                .ConfigureAwait(false);
        }

        TelemetryHelper.Singleton.LogMetrics(bulkOperationResponse);
    }
    catch (Exception error)
    {
        TelemetryHelper.Singleton.LogError(
            "Processing changes in change feed processor {0} failed: {1}", this.processorName, error);
        // Rethrow so the change feed processor retries the batch.
        throw;
    }
}
/// <summary>
/// Serializes the failures of a bulk response and uploads them to the dead-letter
/// blob container as "&lt;failureType&gt;&lt;guid&gt;.csv" with the layout
/// failures|failureCount|failedDocs.
/// </summary>
/// <param name="failureType">Prefix used for the blob name, e.g. "FailedImportDocs".</param>
/// <param name="containerClient">Dead-letter blob container to upload into.</param>
/// <param name="bulkOperationResponse">Bulk response whose failed documents are persisted.</param>
private async Task WriteFailedDocsToBlob(
    string failureType,
    BlobContainerClient containerClient,
    BulkOperationResponse<DocumentMetadata> bulkOperationResponse)
{
    try
    {
        BlobClient blobClient = containerClient.GetBlobClient(failureType + Guid.NewGuid().ToString() + ".csv");

        string failures = JsonConvert.SerializeObject(string.Join(",", bulkOperationResponse.DocFailures));
        string failedDocs = JsonConvert.SerializeObject(string.Join(",", bulkOperationResponse.FailedDocs));

        // Strip embedded line feeds so each failed document stays on a single CSV line.
        failedDocs = failedDocLineFeedRemoverRegex.Replace(failedDocs, string.Empty);

        // BUG FIX: Encoding.ASCII silently replaced every non-ASCII character in the failed
        // documents with '?', corrupting the dead-letter payload. UTF-8 is a lossless
        // superset of ASCII, so ASCII-only payloads are byte-identical to before.
        byte[] byteArray = Encoding.UTF8.GetBytes(failures + "|" + bulkOperationResponse.Failures.Count + "|" + failedDocs);

        using (MemoryStream ms = new MemoryStream(byteArray))
        {
            await blobClient
                .UploadAsync(ms, overwrite: true)
                .ConfigureAwait(false);
        }

        TelemetryHelper.Singleton.LogWarning(
            "Processor {0} - FAILED TO INGEST DOCUMENTS: Writing {1} failed documents to the deadletter blob store.",
            this.processorName,
            bulkOperationResponse.FailedDocs.Count);
    }
    catch (Exception error)
    {
        TelemetryHelper.Singleton.LogError(
            "Change feed processor {0} - Writing document to deadletter blob store failed: {1}",
            this.processorName,
            error);
        // Rethrow: losing the dead-letter record must not be silent.
        throw;
    }
}
// </Initialization>

/// <summary>
/// Creates all <paramref name="documentsToWorkWith"/> in <paramref name="container"/> concurrently
/// and prints a summary (timing, RU charge, success/failure counts) to the console.
/// </summary>
/// <param name="container">Cosmos DB container the items are created in.</param>
/// <param name="documentsToWorkWith">Items to create; each supplies its own partition key.</param>
public static async Task CreateItemsConcurrentlyAsync(Container container, IReadOnlyList<MyItem> documentsToWorkWith)
{
    // <BulkImport>
    var operations = new List<Task<OperationResponse<MyItem>>>(documentsToWorkWith.Count);
    foreach (MyItem document in documentsToWorkWith)
    {
        operations.Add(container.CreateItemAsync(document, new PartitionKey(document.pk)).CaptureOperationResponse(document));
    }
    // </BulkImport>

    BulkOperationResponse<MyItem> summary = await ExecuteTasksAsync(operations);

    Console.WriteLine($"Bulk create operation finished in {summary.TotalTimeTaken}");
    Console.WriteLine($"Consumed {summary.TotalRequestUnitsConsumed} RUs in total");
    Console.WriteLine($"Created {summary.SuccessfulDocuments} documents");
    Console.WriteLine($"Failed {summary.Failures.Count} documents");

    if (summary.Failures.Count > 0)
    {
        MyItem failedItem = summary.Failures[0].Item1;
        Exception failure = summary.Failures[0].Item2;
        Console.WriteLine($"First failed sample document {failedItem.id} - {failure}");
    }
}