/// <summary>
/// Create a hotels index with the standard test documents and as many
/// extra empty documents needed to test.
/// </summary>
/// <param name="size">The total number of documents in the index.</param>
/// <returns>SearchResources for testing.</returns>
public async Task<SearchResources> CreateLargeHotelsIndexAsync(int size)
{
    // Start with the standard test hotels
    SearchResources resources = await SearchResources.CreateWithHotelsIndexAsync(this);

    // Create empty hotels with just an ID for the rest.  Math.Max guards
    // against a requested size smaller than the standard set, which would
    // previously make Enumerable.Range throw on a negative count.
    int existingDocumentCount = SearchResources.TestDocuments.Length;
    List<SearchDocument> hotels =
        Enumerable.Range(existingDocumentCount + 1, Math.Max(0, size - existingDocumentCount))
        .Select(id => new SearchDocument { ["hotelId"] = id.ToString() })
        .ToList();

    // Upload the empty hotels in batches of 1000 until we're complete.
    // GetRange slices the list directly instead of re-scanning it with
    // Skip(i) on every iteration; the old !Any() break was unreachable
    // because the loop condition already guarantees a non-empty slice.
    SearchIndexClient client = resources.GetIndexClient();
    for (int i = 0; i < hotels.Count; i += 1000)
    {
        List<SearchDocument> nextHotels = hotels.GetRange(i, Math.Min(1000, hotels.Count - i));
        await client.IndexDocumentsAsync(IndexDocumentsBatch.Upload(nextHotels));
        await resources.WaitForIndexingAsync();
    }

    return resources;
}
/// <summary>Upload the given products to the search index in a single batch.</summary>
/// <param name="documents">Products to upload.</param>
private static async Task UploadDocumentsAsync(IEnumerable<Product> documents)
{
    var indexClient = CreateSearchIndexClient();
    await indexClient.IndexDocumentsAsync(IndexDocumentsBatch.Upload(documents));
}
// Sample test demonstrating document indexing.  The region below is
// extracted into the README as a published documentation snippet, so the
// code inside it is kept exactly as shipped.
public async Task Index()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyHotelsIndexAsync(this);
    SearchClient client = resources.GetQueryClient();
    try
    {
        #region Snippet:Azure_Search_Tests_Samples_Readme_Index
        IndexDocumentsBatch<Hotel> batch = IndexDocumentsBatch.Create(
            IndexDocumentsAction.Upload(new Hotel { Id = "783", Name = "Upload Inn" }),
            IndexDocumentsAction.Merge(new Hotel { Id = "12", Name = "Renovated Ranch" }));

        IndexDocumentsOptions options = new IndexDocumentsOptions { ThrowOnAnyError = true };
        client.IndexDocuments(batch, options);
        #endregion Snippet:Azure_Search_Tests_Samples_Readme_Index
    }
    catch (RequestFailedException)
    {
        // Ignore the non-existent merge failure
        // (hotel "12" was never uploaded to the empty index, so the Merge
        // action is expected to fail when ThrowOnAnyError is set).
    }
}
/// <summary>Remove a single item from the search index.</summary>
/// <param name="item">The item to delete.</param>
/// <param name="graphRequestContext">Context used to resolve the client.</param>
public async Task DeleteAsync<T>(T item, IGraphRequestContext graphRequestContext) where T : class
{
    var searchClient = Get<T>(graphRequestContext);
    var deleteBatch = IndexDocumentsBatch.Create(IndexDocumentsAction.Delete(item));
    await searchClient.IndexDocumentsAsync(deleteBatch);
}
/// <summary>
/// Build key-only documents for the given keys and submit a delete batch
/// through the buffered sender, either synchronously or asynchronously.
/// </summary>
private static async Task DeleteDocumentsInternal(
    SearchIndexingBufferedSender<SearchDocument> indexer,
    string keyFieldName,
    IEnumerable<string> documentKeys,
    bool async,
    CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(indexer, nameof(indexer));
    Argument.AssertNotNull(keyFieldName, nameof(keyFieldName));
    Argument.AssertNotNull(documentKeys, nameof(documentKeys));

    // Each key becomes a minimal document containing only the key field.
    IEnumerable<SearchDocument> keyDocuments =
        documentKeys.Select(key => new SearchDocument { [keyFieldName] = key });
    var deleteBatch = IndexDocumentsBatch.Delete<SearchDocument>(keyDocuments);

    if (!async)
    {
        indexer.IndexDocuments(deleteBatch, cancellationToken);
        return;
    }

    await indexer.IndexDocumentsAsync(deleteBatch, cancellationToken).ConfigureAwait(false);
}
/// <summary>Upload two sample Ninja documents to the search index.</summary>
private static void AddDocuments()
{
    var credential = new AzureKeyCredential(adminKey);
    var client = new SearchClient(endpoint, indexName, credential);
    var action1 = IndexDocumentsAction.Upload(new Ninja { Id = Guid.NewGuid().ToString(), Name = "Naruto Uzumaki" });
    var action2 = IndexDocumentsAction.Upload(new Ninja { Id = Guid.NewGuid().ToString(), Name = "Sasuke Uchiha" });
    var batch = IndexDocumentsBatch.Create(action1, action2);
    try
    {
        IndexDocumentsResult result = client.IndexDocuments(batch);
    }
    catch (Exception ex)
    {
        // BUG FIX: the message had a {0} placeholder with no argument, so the
        // failure reason was never printed.  Supply the exception message.
        Console.WriteLine("Failed to index some of the documents: {0}", ex.Message);
    }
}
/// <summary>
/// Read Semantic Scholar corpus files, build batches of upload actions, and
/// submit them in parallel until DocumentsToUpload documents are indexed.
/// </summary>
/// <param name="srchclient">Client used to submit the batches.</param>
/// <param name="FileCount">Number of corpus files to process.</param>
static void UploadDocuments(SearchClient srchclient, int FileCount)
{
    var docCounter = 0;
    var batchJobs = new List<IndexDocumentsBatch<SemanticScholar>>();
    var batch = new IndexDocumentsBatch<SemanticScholar>();
    Console.WriteLine("Creating batches for upload...");
    for (var fileNum = 0; fileNum < FileCount; fileNum++)
    {
        var paddedFileNum = fileNum.ToString().PadLeft(3, '0');
        var baseFileName = "s2-corpus-" + paddedFileNum + ".gz";
        var fileToProcess = Path.Combine(DownloadDir, baseFileName).Replace(".gz", "");
        const Int32 BufferSize = 128;
        using (var fileStream = File.OpenRead(fileToProcess))
        using (var streamReader = new StreamReader(fileStream, Encoding.UTF8, true, BufferSize))
        {
            String line;
            while ((line = streamReader.ReadLine()) != null)
            {
                var ssDoc = JsonConvert.DeserializeObject<SemanticScholar>(line);
                batch.Actions.Add(IndexDocumentsAction.Upload(ssDoc));
                docCounter += 1;
                // Checking the batch's own action count (instead of
                // docCounter % MaxBatchSize) stays correct even after a
                // partial batch is flushed at a file boundary.
                if (batch.Actions.Count == MaxBatchSize)
                {
                    batchJobs.Add(batch);
                    batch = new IndexDocumentsBatch<SemanticScholar>();
                    if (batchJobs.Count % 100 == 0)
                    {
                        Console.WriteLine("Created {0} batches...", batchJobs.Count);
                    }
                }
                // BUG FIX: the cap check used to run before the document was
                // added, silently dropping the final document.
                if (docCounter == DocumentsToUpload)
                {
                    break;
                }
            }
        }
        // BUG FIX: flush the partially-filled batch before submitting this
        // file's jobs; previously it was discarded and re-created, losing up
        // to MaxBatchSize - 1 documents per file.
        if (batch.Actions.Count > 0)
        {
            batchJobs.Add(batch);
        }
        ParallelBatchApplication(batchJobs, srchclient);
        batchJobs.Clear();
        batch = new IndexDocumentsBatch<SemanticScholar>();
        if (docCounter == DocumentsToUpload)
        {
            break;
        }
    }
    // Defensive tail flush; the per-file flush above normally leaves nothing.
    if (batch.Actions.Count > 0)
    {
        batchJobs.Add(batch);
    }
    ParallelBatchApplication(batchJobs, srchclient);
}
/// <summary>Delete a customer document from the index, throwing on any failure.</summary>
/// <param name="customerToDelete">The customer document to remove.</param>
public void DeleteCustomerData(CustomerIndex customerToDelete)
{
    var deleteBatch = IndexDocumentsBatch.Create(IndexDocumentsAction.Delete(customerToDelete));
    var indexingOptions = new IndexDocumentsOptions { ThrowOnAnyError = true };
    _qryClient.IndexDocuments(deleteBatch, indexingOptions);
}
/// <summary>
/// Indexes documents by calling <see cref="SearchClient.IndexDocuments{T}(IndexDocumentsBatch{T}, IndexDocumentsOptions, CancellationToken)"/>.
/// </summary>
/// <param name="cancellationToken">The token used to signal cancellation request.</param>
public override void Run(CancellationToken cancellationToken)
{
    var uploadBatch = IndexDocumentsBatch.Upload(_hotels);
    var indexingOptions = new IndexDocumentsOptions { ThrowOnAnyError = true };
    SearchClient.IndexDocuments(uploadBatch, indexingOptions, cancellationToken);
}
/// <summary>
/// Indexes documents by calling <see cref="SearchClient.IndexDocumentsAsync{T}(IndexDocumentsBatch{T}, IndexDocumentsOptions, CancellationToken)"/>.
/// </summary>
/// <param name="cancellationToken">The token used to signal cancellation request.</param>
public override async Task RunAsync(CancellationToken cancellationToken)
{
    var uploadBatch = IndexDocumentsBatch.Upload(_hotels);
    var indexingOptions = new IndexDocumentsOptions { ThrowOnAnyError = true };
    await SearchClient.IndexDocumentsAsync(uploadBatch, indexingOptions, cancellationToken);
}
/// <summary>
/// End-to-end sample: create a search index, analyze a product review with
/// Text Analytics opinion mining, and upload the resulting review document.
/// </summary>
static async Task Main(string[] args)
{
    var reviewText = "The quality of the pictures are good, but the body is not durable";
    var productId = "1"; // ID of the product reviewed
    var reviewId = "1"; // ID of the review used as the search document ID
    var indexName = "sample-index";

    // Build the Cognitive Search clients from environment configuration.
    var searchKey = Environment.GetEnvironmentVariable("COGNITIVE_SEARCH_KEY");
    var searchEndpoint = Environment.GetEnvironmentVariable("COGNITIVE_SEARCH_ENDPOINT");
    var searchCredential = new AzureKeyCredential(searchKey);
    var searchIndexClient = new SearchIndexClient(new Uri(searchEndpoint), searchCredential);

    // The index must exist before any documents can be uploaded.
    await CreateIndex(indexName, searchIndexClient);
    var searchClient = new SearchClient(new Uri(searchEndpoint), indexName, searchCredential);

    // Build the Text Analytics client from environment configuration.
    var textAnalyticsKey = Environment.GetEnvironmentVariable("TEXT_ANALYTICS_KEY");
    var textAnalyticsEndpoint = Environment.GetEnvironmentVariable("TEXT_ANALYTICS_ENDPOINT");
    var textAnalyticsClient = new TextAnalyticsClient(
        new Uri(textAnalyticsEndpoint),
        new AzureKeyCredential(textAnalyticsKey));

    // Run sentiment analysis with opinion mining enabled.
    var sentimentOptions = new AnalyzeSentimentOptions() { IncludeOpinionMining = true };
    var sentimentResponse = await textAnalyticsClient.AnalyzeSentimentAsync(
        reviewText, language: "en", options: sentimentOptions);

    // Map the analysis result onto a review search document and upload it.
    Review review = CreateReviewDocument(productId, reviewId, sentimentResponse);
    var uploadBatch = IndexDocumentsBatch.Create(IndexDocumentsAction.Upload(review));
    try
    {
        IndexDocumentsResult result = await searchClient.IndexDocumentsAsync(uploadBatch);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
/// <summary>
/// Indexes a batch of documents, splitting first when a split was requested,
/// and raises a notification whether or not the indexing call succeeds.
/// </summary>
public override Response<IndexDocumentsResult> IndexDocuments<T>(IndexDocumentsBatch<T> batch, IndexDocumentsOptions options = null, CancellationToken cancellationToken = default)
{
    try
    {
        SplitWhenRequested();
        Response<IndexDocumentsResult> response = base.IndexDocuments(batch, options, cancellationToken);
        return ProcessResponse(response);
    }
    finally
    {
        // Notify even on the exception path.
        RaiseNotification();
    }
}
/// <summary>
/// Seed the index with four sample models, translating each name to Polish
/// before upload; throws if any document fails to index.
/// </summary>
private static async Task AddDataAsync(SearchClient searchClient)
{
    var translator = new Translator();

    // Translate a model name and build the corresponding upload action.
    async Task<IndexDocumentsAction<SearchModel>> BuildUploadAsync(
        string name, string[] keyPhrases, DateTime updated)
    {
        string polishName = await translator.GetTranslatedTextAsync(name);
        return IndexDocumentsAction.Upload(new SearchModel
        {
            Id = Guid.NewGuid().ToString(),
            Name = name,
            PolishName = polishName,
            KeyPhrases = keyPhrases,
            Updated = updated
        });
    }

    // Translations run sequentially in the same order as before (1, 2, 3, 4).
    var action1 = await BuildUploadAsync(
        "external styles are defined withing the element, inside the section of an HTML page",
        new[] { "CSS", "styles", "metadata", "element", "EF01" },
        new DateTime(2020, 10, 1, 7, 0, 0));
    var action2 = await BuildUploadAsync(
        "To include an external JavaScript file, use the script tag with the attribute src",
        new[] { "javascript", "EF02", "script", "tag", "src" },
        new DateTime(2020, 9, 2, 8, 54, 0));
    var action3 = await BuildUploadAsync(
        "Move to the home position",
        new[] { "storage", "container", "EF03", "home" },
        new DateTime(2020, 7, 2, 21, 21, 0));
    var action4 = await BuildUploadAsync(
        "removal of stuck magnets restores machine cycle",
        new[] { "magnets", "machine", "cycle", "EF04" },
        new DateTime(2020, 8, 2, 12, 11, 0));

    // Batch order matches the original upload order: 1, 2, 4, 3.
    var batch = IndexDocumentsBatch.Create(action1, action2, action4, action3);
    await searchClient.IndexDocumentsAsync(batch, new IndexDocumentsOptions { ThrowOnAnyError = true });
}
/// <summary>Upload hotels in a single batch, logging any service failure.</summary>
public static async Task UploadDocumentsAsync(SearchClient searchClient, List<Hotel> hotels)
{
    var uploadBatch = IndexDocumentsBatch.Upload(hotels);
    try
    {
        await searchClient.IndexDocumentsAsync(uploadBatch).ConfigureAwait(false);
    }
    catch (RequestFailedException ex)
    {
        // Best-effort sample: report the failure and continue.
        Console.WriteLine("Failed to index the documents: \n{0}", ex.Message);
    }
}
/// <summary>
/// Merge-or-upload a single album document into the search index.
/// Any indexing failure propagates to the caller unchanged.
/// </summary>
/// <param name="document">The album document to merge or upload.</param>
public async Task UploadDocument(AlbumInfoSearchObject document)
{
    IndexDocumentsBatch<AlbumInfoSearchObject> batch =
        IndexDocumentsBatch.Create(IndexDocumentsAction.MergeOrUpload(document));
    // BUG FIX: removed the no-op `catch (Exception ex) { throw; }` wrapper —
    // it added nothing but an unused-variable warning, and exceptions still
    // propagate exactly as before.
    await searchClient.IndexDocumentsAsync(batch);
}
/// <summary>Upload two sample hotels and wait briefly for them to be indexed.</summary>
private static void UploadDocuments(SearchClient searchClient)
{
    IndexDocumentsBatch<Hotel> batch = IndexDocumentsBatch.Create(
        IndexDocumentsAction.Upload(
            new Hotel()
            {
                HotelId = "1",
                BaseRate = 199.0,
                Description = "Best hotel in town",
                DescriptionFr = "Meilleur hôtel en ville",
                HotelName = "Fancy Stay",
                Category = "Luxury",
                Tags = new[] { "pool", "view", "wifi", "concierge" },
                ParkingIncluded = false,
                SmokingAllowed = false,
                LastRenovationDate = new DateTimeOffset(2010, 6, 27, 0, 0, 0, TimeSpan.Zero),
                Rating = 5,
                Location = GeographyPoint.Create(47.678581, -122.131577)
            }),
        IndexDocumentsAction.Upload(
            new Hotel()
            {
                HotelId = "2",
                BaseRate = 79.99,
                Description = "Cheapest hotel in town",
                DescriptionFr = "Hôtel le moins cher en ville",
                HotelName = "Roach Motel",
                Category = "Budget",
                Tags = new[] { "motel", "budget" },
                ParkingIncluded = true,
                SmokingAllowed = true,
                LastRenovationDate = new DateTimeOffset(1982, 4, 28, 0, 0, 0, TimeSpan.Zero),
                Rating = 1,
                Location = GeographyPoint.Create(49.678581, -122.131577)
            }));
    try
    {
        IndexDocumentsResult result = searchClient.IndexDocuments(batch);
    }
    catch (Exception e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failure and continue.
        // BUG FIX: the {0} placeholder previously had no argument, so nothing
        // useful was ever printed; supply the exception message.
        Console.WriteLine("Failed to index some of the documents: {0}", e.Message);
    }
    Console.WriteLine("Waiting for documents to be indexed...\n");
    Thread.Sleep(2000);
}
/// <summary>
/// Adds or removes items in Azure Search.
/// </summary>
/// <typeparam name="T">Must be an entity type supported by Azure Search.</typeparam>
/// <param name="elements">Elements to store in or remove from the search index.</param>
/// <param name="operationType">Operation type: add or delete.</param>
private void OperationElements<T>(List<T> elements, SearchOperation operationType)
{
    // Read the index name (kept from the original; Index may have side effects).
    var indexName = Index;

    // Pick the indexing action for each element based on the operation.
    var actions = new List<IndexDocumentsAction<T>>(elements.Count);
    foreach (var element in elements)
    {
        actions.Add(operationType == SearchOperation.Add
            ? IndexDocumentsAction.Upload(element)
            : IndexDocumentsAction.Delete(element));
    }

    // Submit everything as one batch.
    _search.IndexDocuments(IndexDocumentsBatch.Create(actions.ToArray()));
}
/// <summary>
/// Translate the given index commands into batch actions and submit them;
/// skips the service call entirely when no actions are produced.
/// </summary>
public async Task ExecuteAsync(IndexCommand[] commands, CancellationToken ct = default)
{
    var documentBatch = IndexDocumentsBatch.Create<SearchDocument>();

    foreach (var command in commands)
    {
        CommandFactory.CreateCommands(command, documentBatch.Actions);
    }

    // Nothing to send — avoid an empty round trip.
    if (documentBatch.Actions.Count > 0)
    {
        await searchClient.IndexDocumentsAsync(documentBatch, cancellationToken: ct);
    }
}
/// <summary>
/// Verify that a hotel indexed as a dynamic document can be fetched back
/// as a strongly-typed Hotel right after indexing completes.
/// </summary>
public async Task RecentlyIndexedDynamicDocument()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyHotelsIndexAsync(this);
    Hotel document = SearchResources.TestDocuments[0];

    SearchDocument[] uploads = { document.AsDocument() };
    await resources.GetIndexClient().IndexDocumentsAsync(IndexDocumentsBatch.Upload(uploads));
    await resources.WaitForIndexingAsync();

    Response<Hotel> fetched = await resources.GetQueryClient().GetDocumentAsync<Hotel>(document.HotelId);
    Assert.AreEqual(document.HotelId, fetched.Value.HotelId);
}
/// <summary>
/// Round-trip a document through an empty hotels index and verify the value
/// read back matches <paramref name="expected"/> (or the document itself).
/// </summary>
public async Task VerifyRoundtrip<T>(
    Func<T, string> getKey,
    T document,
    T expected = default,
    GetDocumentOptions options = null)
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyHotelsIndexAsync(this);

    T[] uploads = { document };
    await resources.GetIndexClient().IndexDocumentsAsync<T>(IndexDocumentsBatch.Upload<T>(uploads));
    await resources.WaitForIndexingAsync();

    Response<T> response = await resources.GetQueryClient().GetDocumentAsync<T>(getKey(document), options);

    // Only validate expected properties
    AssertApproximate(expected ?? document, response.Value);
}
/// <summary>
/// Populates the Azure Search index with `<paramref name="documentCount"/>` number of documents, each of `<paramref name="documentSize"/>` size.
/// </summary>
/// <param name="documentCount">Number of documents to index.</param>
/// <param name="documentSize">Size of each document being indexed.</param>
/// <returns>Task representing the asynchronous work.</returns>
protected async Task PopulateIndexAsync(int documentCount, DocumentSize documentSize)
{
    List<Hotel> hotels = DocumentGenerator.GenerateHotels(documentCount, documentSize);
    await SearchClient.IndexDocumentsAsync(
        IndexDocumentsBatch.Upload(hotels),
        new IndexDocumentsOptions() { ThrowOnAnyError = true });

    // Poll until the service reports all documents as visible.
    long uploadedDocumentCount = 0;
    while (uploadedDocumentCount < documentCount)
    {
        uploadedDocumentCount = (await SearchClient.GetDocumentCountAsync()).Value;
        if (uploadedDocumentCount >= documentCount)
        {
            break;
        }
        // BUG FIX: use a non-blocking delay instead of Thread.Sleep inside an
        // async method; the loop also now uses >= so it terminates even if
        // the reported count ever overshoots the target (the old != check
        // could spin forever).
        await Task.Delay(1000);
    }
}
/// <summary>Verify that struct-based documents round-trip through the index.</summary>
public async Task Structs()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyHotelsIndexAsync(this);

    var document = new SimpleStructHotel { HotelId = "4", HotelName = "Value Inn" };
    SimpleStructHotel[] uploads = { document };
    await resources.GetIndexClient().IndexDocumentsAsync(IndexDocumentsBatch.Upload(uploads));
    await resources.WaitForIndexingAsync();

    SearchIndexClient queryClient = resources.GetQueryClient();
    Response<SimpleStructHotel> fetched = await queryClient.GetDocumentAsync<SimpleStructHotel>(document.HotelId);
    Assert.AreEqual(document, fetched.Value);
}
/// <summary>
/// Create (or update) the location search index and upload the given
/// documents, returning the number of upload results.
/// </summary>
/// <param name="searchLocations">The location documents to index.</param>
/// <returns>The count of indexing results returned by the service.</returns>
public async Task<int> BuildIndexAsync(IEnumerable<SearchLocationIndex> searchLocations)
{
    logger.LogInformation($"Starting to build index for {searchLocations.Count()}");
    try
    {
        var searchIndexClient = new SearchIndexClient(azureSearchIndexConfig.EndpointUri, GetAzureKeyCredential());
        var searchClient = new SearchClient(azureSearchIndexConfig.EndpointUri, azureSearchIndexConfig.LocationSearchIndex, GetAzureKeyCredential());

        // Build the index definition from the model's field attributes.
        var fieldBuilder = new FieldBuilder();
        var searchFields = fieldBuilder.Build(typeof(SearchLocationIndex));
        var definition = new SearchIndex(azureSearchIndexConfig.LocationSearchIndex, searchFields);
        var suggester = new SearchSuggester(suggestorName, new[] { nameof(SearchLocationIndex.LocationName) });
        definition.Suggesters.Add(suggester);

        logger.LogInformation("created search objects and creating index");
        await searchIndexClient.CreateOrUpdateIndexAsync(definition).ConfigureAwait(false);

        logger.LogInformation("Created search index and uploading documents");
        var batch = IndexDocumentsBatch.Upload(searchLocations);
        IndexDocumentsResult result = await searchClient.IndexDocumentsAsync(batch).ConfigureAwait(false);

        // Materialize once instead of enumerating for Any/First/Count separately.
        var failedRecords = result.Results.Where(r => !r.Succeeded).ToList();
        if (failedRecords.Count > 0)
        {
            var sampleFailedRecord = failedRecords[0];
            var sampleMessage = $"{failedRecords.Count} have failed to upload to the index, sample failed record message {sampleFailedRecord.ErrorMessage}, Status = {sampleFailedRecord.Status}";
            logger.LogError(sampleMessage);
            // BUG FIX: throw with the composed message, not the literal
            // string "sampleMessage".
            throw new DfcIndexUploadException(sampleMessage);
        }

        logger.LogInformation($"Created search index and uploaded {result.Results.Count} documents");
        return result.Results.Count;
    }
    catch (Exception ex)
    {
        // BUG FIX: pass the exception as the exception argument so its stack
        // trace is logged, instead of as an unused message format argument.
        logger.LogError(ex, "Building index had an error");
        throw;
    }
}
/// <summary>
/// Upload three secured-file documents, each restricted to one of the given
/// groups, then wait briefly for indexing.
/// </summary>
/// <param name="indexName">Name of the target index (unused by the upload itself).</param>
/// <param name="groups">Group IDs used to secure the files; requires at least two entries.</param>
private static void IndexDocuments(string indexName, List<string> groups)
{
    IndexDocumentsBatch<SecuredFiles> batch = IndexDocumentsBatch.Create(
        IndexDocumentsAction.Upload(
            new SecuredFiles() { FileId = "1", Name = "secured_file_a", GroupIds = new[] { groups[0] } }),
        IndexDocumentsAction.Upload(
            new SecuredFiles() { FileId = "2", Name = "secured_file_b", GroupIds = new[] { groups[0] } }),
        IndexDocumentsAction.Upload(
            new SecuredFiles() { FileId = "3", Name = "secured_file_c", GroupIds = new[] { groups[1] } }));
    try
    {
        IndexDocumentsResult result = searchClient.IndexDocuments(batch);
    }
    catch (Exception e)
    {
        // Sometimes when your Search service is under load, indexing will fail for some of the documents in
        // the batch. Depending on your application, you can take compensating actions like delaying and
        // retrying. For this simple demo, we just log the failure and continue.
        // BUG FIX: the {0} placeholder previously had no argument; supply the
        // exception message so the failure is actually reported.
        Console.WriteLine("Failed to index some of the documents: {0}", e.Message);
    }
    Console.WriteLine("Waiting for documents to be indexed...\n");
    Thread.Sleep(2000);
}
/// <summary>
/// Exercise the buffered sender with default options: submit one delete,
/// one upload, and one merge-or-upload action, flush, and expect two
/// documents in the index.
/// </summary>
public async Task Convenience_None()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] data = SimpleDocument.GetDocuments(3);

    await using SearchIndexingBufferedSender<SimpleDocument> indexer =
        client.CreateIndexingBufferedSender(new SearchIndexingBufferedSenderOptions<SimpleDocument>());
    AssertNoFailures(indexer);

    // One action of each flavor in a single batch.
    IndexDocumentsBatch<SimpleDocument> batch = IndexDocumentsBatch.Create(
        IndexDocumentsAction.Delete<SimpleDocument>(data[0]),
        IndexDocumentsAction.Upload<SimpleDocument>(data[1]),
        IndexDocumentsAction.MergeOrUpload<SimpleDocument>(data[2]));
    await indexer.IndexDocumentsAsync(batch);
    await indexer.FlushAsync();

    await WaitForDocumentCountAsync(resources.GetSearchClient(), 2);
}
/// <summary>
/// Verify that null and empty values on a dynamic document survive a round
/// trip through the index.
/// </summary>
public async Task EmptyValuesDynamicDocument()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyHotelsIndexAsync(this);

    // A room whose every field is null or empty.
    var emptyRoom = new SearchDocument
    {
        ["baseRate"] = null,
        ["bedOptions"] = null,
        ["sleepsCount"] = null,
        ["smokingAllowed"] = null,
        ["tags"] = new object[0]
    };

    // A hotel with only its key populated; everything else null or empty.
    var document = new SearchDocument
    {
        ["hotelId"] = "1",
        ["hotelName"] = null,
        ["tags"] = new object[0],
        ["parkingIncluded"] = null,
        ["lastRenovationDate"] = null,
        ["rating"] = null,
        ["location"] = null,
        ["geoLocation"] = null,
        ["address"] = null,
        ["rooms"] = new[] { emptyRoom }
    };

    SearchDocument[] uploads = { document };
    await resources.GetSearchClient().IndexDocumentsAsync(IndexDocumentsBatch.Upload(uploads));
    await resources.WaitForIndexingAsync();

    Response<SearchDocument> fetched =
        await resources.GetQueryClient().GetDocumentAsync<SearchDocument>((string)document["hotelId"]);
    Assert.AreEqual(document["hotelId"], fetched.Value["hotelId"]);
}
/// <summary>
/// Recreate the named index from <typeparamref name="T"/>'s field attributes
/// and upload all entries in batches of 5000.
/// </summary>
/// <param name="indexName">Name of the index to (re)create.</param>
/// <param name="entries">Entries to upload into the index.</param>
public async Task CreateAndPopulateIndexAsync<T>(string indexName, List<T> entries)
{
    var serviceClient = _factory.SearchIndexClient;
    await DeleteIndexAsync(indexName).ConfigureAwait(false);

    var definition = new SearchIndex(indexName) { Fields = new FieldBuilder().Build(typeof(T)) };
    await serviceClient.CreateIndexAsync(definition).ConfigureAwait(false);
    var indexClient = serviceClient.GetSearchClient(indexName);

    var batch = IndexDocumentsBatch.Create<T>();
    _logger.LogInformation("Indexing {Count} entries for Index {IndexName}", entries.Count, indexName);
    for (var counter = 0; counter < entries.Count; counter++)
    {
        batch.Actions.Add(IndexDocumentsAction.Upload(entries[counter]));
        // BUG FIX: submit after every 5000 additions; the previous
        // `counter % 5000 == 0` fired on the very first entry (counter == 0),
        // sending a batch of one document.
        if ((counter + 1) % 5000 == 0) //it's too big for one batch
        {
            await indexClient.IndexDocumentsAsync(batch, new Azure.Search.Documents.IndexDocumentsOptions { ThrowOnAnyError = true }).ConfigureAwait(false);
            batch = IndexDocumentsBatch.Create<T>();
            _logger.LogInformation("Submitted {Counter} of {Total} for Index {IndexName}", counter + 1, entries.Count, indexName);
        }
    }
    // BUG FIX: only submit the final batch when it actually has actions, so
    // we don't issue an empty indexing request when the entry count is an
    // exact multiple of the batch size (or zero).
    if (batch.Actions.Count > 0)
    {
        await indexClient.IndexDocumentsAsync(batch, new Azure.Search.Documents.IndexDocumentsOptions { ThrowOnAnyError = true }).ConfigureAwait(false);
    }
    _logger.LogInformation("Completed index submission for Index {IndexName}", indexName);
}
/// <summary>Merge-or-upload a single hotel update, logging any indexing failure.</summary>
private static void MergeOrUploadDocuments(SearchClient searchClient)
{
    var hotelUpdate = new Hotel()
    {
        HotelId = "4",
        Rating = 1,
        Tags = new[] { "concierge", "view", "24-hour front desk service" },
    };
    IndexDocumentsBatch<Hotel> batch =
        IndexDocumentsBatch.Create(IndexDocumentsAction.MergeOrUpload(hotelUpdate));

    try
    {
        IndexDocumentsResult result = searchClient.IndexDocuments(batch);
    }
    catch (Exception e)
    {
        // If documents are dropped during indexing, a real application could
        // compensate by delaying and retrying; this demo just logs the
        // failure and continues.
        Console.WriteLine($"Failed to index some of the documents: {e}");
    }
}
/// <summary>Upload a single product document and return the service response.</summary>
private async Task<Response<IndexDocumentsResult>> IndexProduct(SearchClient searchClient, ProductDocument product)
{
    var uploadBatch = IndexDocumentsBatch.Create(IndexDocumentsAction.Upload(product));
    return await searchClient.IndexDocumentsAsync(uploadBatch);
}
/// <summary>
/// Send indexing actions to be processed by the service.
/// </summary>
/// <param name="batch">The batch of actions to submit.</param>
/// <param name="cancellationToken">A cancellation token.</param>
/// <returns>Whether the submission was throttled.</returns>
protected override async Task<bool> OnSubmitBatchAsync(IList<PublisherAction<IndexDocumentsAction<T>>> batch, CancellationToken cancellationToken)
{
    // Bail early if someone sent an empty batch
    if (batch.Count == 0)
    {
        return (false);
    }

    // Notify the action is being sent
    foreach (PublisherAction<IndexDocumentsAction<T>> action in batch)
    {
        await _sender.OnActionSentAsync(action.Document, cancellationToken).ConfigureAwait(false);
    }
    AzureSearchDocumentsEventSource.Instance.BatchSubmitted(_sender.Endpoint.AbsoluteUri, batch.Count);

    // Send the request to the service
    Response<IndexDocumentsResult> response = null;
    try
    {
        response = await _sender.SearchClient.IndexDocumentsAsync(
            IndexDocumentsBatch.Create(batch.Select(a => a.Document).ToArray()),
            cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }
    // Handle batch level failures
    catch (RequestFailedException ex) when (ex.Status == 413) // Payload Too Large
    {
        int oldBatchActionCount = BatchActionCount;
        // Split the batch and try with smaller payloads
        // Update 'BatchActionCount' so future submissions can avoid this error.
        BatchActionCount = (int)Math.Floor((double)batch.Count / 2.0);
        AzureSearchDocumentsEventSource.Instance.BatchActionCountUpdated(_sender.Endpoint.AbsoluteUri, oldBatchActionCount, BatchActionCount);
        // First half goes back through submission immediately...
        var smaller = new List<PublisherAction<IndexDocumentsAction<T>>>(batch.Take(BatchActionCount));

        // Add the second half to the retry queue without counting this as a retry attempt
        EnqueueRetry(batch.Skip(BatchActionCount));

        // Try resubmitting with just the smaller half
        await SubmitBatchAsync(smaller, cancellationToken).ConfigureAwait(false);
        // A 413 split is not reported as throttling.
        return (false);
    }
    catch (Exception ex)
    {
        // Retry the whole batch using the same exception for everything
        foreach (PublisherAction<IndexDocumentsAction<T>> action in batch)
        {
            await EnqueueOrFailRetryAsync(action, null, ex, cancellationToken).ConfigureAwait(false);
        }
        // Search currently uses 503s for throttling
        return (ex is RequestFailedException failure && failure.Status == 503);
    }

    // Handle individual responses which might be success or failure
    bool throttled = false;
    foreach ((PublisherAction<IndexDocumentsAction<T>> action, IndexingResult result) in AssociateResults(batch, response.Value.Results))
    {
        // Search currently uses 503s for throttling
        throttled |= (result.Status == 503);
        // Results are expected to be paired with their originating actions.
        Debug.Assert(action.Key == result.Key);
        if (result.Succeeded)
        {
            await _sender.OnActionCompletedAsync(
                action.Document,
                result,
                cancellationToken)
                .ConfigureAwait(false);
        }
        else if (IsRetriable(result.Status))
        {
            await EnqueueOrFailRetryAsync(
                action,
                result,
                exception: null,
                cancellationToken)
                .ConfigureAwait(false);
        }
        else
        {
            // Non-retriable per-document failure: report it to the sender.
            await _sender.OnActionFailedAsync(
                action.Document,
                result,
                exception: null,
                cancellationToken)
                .ConfigureAwait(false);
        }
    }
    return (throttled);
}