/// <summary>
/// Creates a new SearchIndexingPublisher which immediately starts
/// listening to process requests.
/// </summary>
/// <param name="sender">The sender that produces actions.</param>
/// <param name="autoFlush">
/// A value indicating whether the publisher should automatically flush.
/// </param>
/// <param name="autoFlushInterval">
/// An optional amount of time to wait before automatically flushing.
/// </param>
/// <param name="batchActionSize">
/// The number of actions to group into a batch.
/// </param>
/// <param name="batchPayloadSize">
/// The number of bytes to use when tuning the behavior of the
/// publisher.
/// </param>
/// <param name="retryCount">
/// The number of times to retry a failed document.
/// </param>
/// <param name="publisherCancellationToken">
/// A <see cref="CancellationToken"/> to use when publishing.
/// </param>
public SearchIndexingPublisher(
    SearchIndexingBufferedSender<T> sender,
    bool autoFlush,
    TimeSpan? autoFlushInterval,
    int? batchActionSize,
    int? batchPayloadSize,
    int? retryCount,
    CancellationToken publisherCancellationToken)
    : base(
        autoFlush,
        autoFlushInterval,
        batchActionSize,
        batchPayloadSize,
        retryCount,
        publisherCancellationToken)
{
    // Everything but the sender is handled by the base publisher.
    _sender = sender;
}
/// <summary>
/// Initializes a new instance of the <see cref="IndexActionEventArgs{T}"/>
/// class.
/// </summary>
/// <param name="sender">
/// The <see cref="SearchIndexingBufferedSender{T}"/> raising the event.
/// </param>
/// <param name="action">
/// The <see cref="IndexDocumentsAction{T}"/> that was added, sent,
/// completed, or failed.
/// </param>
/// <param name="isRunningSynchronously">
/// A value indicating whether the event handler was invoked
/// synchronously or asynchronously. Please see
/// <see cref="Azure.Core.SyncAsyncEventHandler{T}"/> for more details.
/// </param>
/// <param name="cancellationToken">
/// A cancellation token related to the original operation that raised
/// the event. It's important for your handler to pass this token
/// along to any asynchronous or long-running synchronous operations
/// that take a token so cancellation will correctly propagate. The
/// default value is <see cref="CancellationToken.None"/>.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown if <paramref name="sender"/> or <paramref name="action"/>
/// are null.
/// </exception>
public IndexActionEventArgs(
    SearchIndexingBufferedSender<T> sender,
    IndexDocumentsAction<T> action,
    bool isRunningSynchronously,
    CancellationToken cancellationToken = default)
    : base(isRunningSynchronously, cancellationToken)
{
    // Both references are required for the event to be meaningful.
    Argument.AssertNotNull(sender, nameof(sender));
    Argument.AssertNotNull(action, nameof(action));

    Sender = sender;
    Action = action;
}
/// <summary>
/// Initializes a new instance of the
/// <see cref="IndexActionFailedEventArgs{T}"/> class.
/// </summary>
/// <param name="sender">
/// The <see cref="SearchIndexingBufferedSender{T}"/> raising the event.
/// </param>
/// <param name="action">
/// The <see cref="IndexDocumentsAction{T}"/> that failed.
/// </param>
/// <param name="result">
/// The <see cref="IndexingResult"/> of an action that failed to
/// complete.
/// </param>
/// <param name="exception">
/// The <see cref="Exception"/> caused by an action that failed to
/// complete.
/// </param>
/// <param name="isRunningSynchronously">
/// A value indicating whether the event handler was invoked
/// synchronously or asynchronously. Please see
/// <see cref="Azure.Core.SyncAsyncEventHandler{T}"/> for more details.
/// </param>
/// <param name="cancellationToken">
/// A cancellation token related to the original operation that raised
/// the event. It's important for your handler to pass this token
/// along to any asynchronous or long-running synchronous operations
/// that take a token so cancellation will correctly propagate. The
/// default value is <see cref="CancellationToken.None"/>.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown if <paramref name="sender"/> or <paramref name="action"/>
/// are null.
/// </exception>
public IndexActionFailedEventArgs(
    SearchIndexingBufferedSender<T> sender,
    IndexDocumentsAction<T> action,
    IndexingResult result,
    Exception exception,
    bool isRunningSynchronously,
    CancellationToken cancellationToken = default)
    : base(sender, action, isRunningSynchronously, cancellationToken)
{
    // Do not validate - either might be null
    Result = result;
    Exception = exception;
}
/// <summary>
/// Collect every failed action raised by the indexer into a
/// thread-safe queue for later inspection by the test.
/// </summary>
private static ConcurrentQueue<IndexDocumentsAction<T>> TrackFailures<T>(SearchIndexingBufferedSender<T> indexer)
{
    var failed = new ConcurrentQueue<IndexDocumentsAction<T>>();
    indexer.ActionFailedAsync +=
        (IndexDocumentsAction<T> doc, IndexingResult result, Exception ex, CancellationToken cancellationToken) =>
        {
            // Only the action itself is recorded; result/exception are ignored.
            failed.Enqueue(doc);
            return Task.CompletedTask;
        };
    return failed;
}
/// <summary>
/// Verify documents whose key accessor can't be built locally still
/// upload successfully (the key field is fetched from the index).
/// </summary>
public async Task KeyFieldAccessor_FetchIndex()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    UnbuildableDocument[] documents = UnbuildableDocument.GetDocuments(10);

    await using SearchIndexingBufferedSender<UnbuildableDocument> sender =
        searchClient.CreateIndexingBufferedSender<UnbuildableDocument>();
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await sender.FlushAsync();

    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Merging into an empty index fails every action; the test asserts
/// one failure per submitted document.
/// </summary>
public async Task Convenience_Merge()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());
    ConcurrentQueue<IndexDocumentsAction<SimpleDocument>> failed = TrackFailures(sender);

    await sender.MergeDocumentsAsync(documents);
    await sender.FlushAsync();

    Assert.AreEqual(documents.Length, failed.Count);
}
/// <summary>
/// MergeOrUpload succeeds against an empty index and results in the
/// full document count.
/// </summary>
public async Task Convenience_MergeOrUpload()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());
    AssertNoFailures(sender);

    await sender.MergeOrUploadDocumentsAsync(documents);
    await sender.FlushAsync();

    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Force the batching client to split the next batch and confirm all
/// documents still land in the index without failures.
/// </summary>
public async Task Behavior_Split()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    BatchingSearchClient batchingClient = GetBatchingSearchClient(resources);
    SimpleDocument[] documents = SimpleDocument.GetDocuments(BatchSize);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        batchingClient.CreateIndexingBufferedSender<SimpleDocument>();
    AssertNoFailures(sender);

    // Trigger a split on the next submission.
    batchingClient.SplitNextBatch = true;
    await sender.UploadDocumentsAsync(documents);
    await sender.FlushAsync();

    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Deliberately never disposes the sender (no await using) to verify
/// that leaving it undisposed does not crash.
/// </summary>
public async Task Dispose_UndisposedNoCrash()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    // NOTE: intentionally not wrapped in `await using`.
    SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>() { AutoFlush = false });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
}
/// <summary>
/// A single bad merge mixed into 1000 good uploads produces exactly
/// one fine-grained failure notification (key "Fake", status 404).
/// </summary>
public async Task Champion_FineGrainedErrors()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(1000);

    // Don't touch the failures outside of the event handler until
    // we've finished flushing
    List<IndexingResult> failed = new List<IndexingResult>();
    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender<SimpleDocument>(
            new SearchIndexingBufferedSenderOptions<SimpleDocument> { AutoFlush = false });
    sender.ActionFailedAsync +=
        (IndexDocumentsAction<SimpleDocument> doc, IndexingResult result, Exception ex, CancellationToken cancellationToken) =>
        {
            failed.Add(result);
            return Task.CompletedTask;
        };

    // Sandwich a merge of a nonexistent document between two valid uploads.
    await sender.UploadDocumentsAsync(documents.Take(500));
    await sender.MergeDocumentsAsync(new[] { new SimpleDocument { Id = "Fake" } });
    await sender.UploadDocumentsAsync(documents.Skip(500));
    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), 1000);

    Assert.AreEqual(1, failed.Count);
    Assert.AreEqual("Fake", failed[0].Key);
    Assert.AreEqual(404, failed[0].Status);
}
/// <summary>
/// Upload 50,000 documents in one shot and rely on DisposeAsync (via
/// the scoped await using) to flush before counting.
/// </summary>
public async Task Champion_OneShotUpload()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(50000);

    // Wrap in a block so we DisposeAsync before getting the Count below
    {
        await using SearchIndexingBufferedSender<SimpleDocument> sender =
            searchClient.CreateIndexingBufferedSender<SimpleDocument>();
        AssertNoFailures(sender);
        await sender.UploadDocumentsAsync(documents);
    }

    // Check that we have the correct number of documents
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// With AutoFlush off, an explicit DisposeAsync must flush the pending
/// documents to the index.
/// </summary>
public async Task Dispose_Flushes()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>() { AutoFlush = false });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);

    // Dispose explicitly (the await using above will dispose again later).
    IAsyncDisposable disposable = sender;
    await disposable.DisposeAsync();

    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Uploading documents whose key field can't be resolved should raise
/// an <see cref="InvalidOperationException"/> naming the document type.
/// </summary>
public async Task KeyFieldAccessor_Error()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    Hotel[] data = SearchResources.TestDocuments;
    await using SearchIndexingBufferedSender<Hotel> indexer =
        client.CreateIndexingBufferedSender<Hotel>();
    AssertNoFailures(indexer);

    // BUG FIX: the original try/catch silently passed when no exception
    // was thrown; assert that the expected exception actually occurs.
    InvalidOperationException ex = Assert.ThrowsAsync<InvalidOperationException>(
        async () => await indexer.UploadDocumentsAsync(data));
    StringAssert.Contains(nameof(Hotel), ex.Message);
}
/// <summary>
/// Every uploaded document should raise one ActionAddedAsync
/// notification.
/// </summary>
public async Task Notifications_Added()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());

    // Count added notifications as they arrive.
    int addedCount = 0;
    sender.ActionAddedAsync +=
        (action, token) =>
        {
            addedCount++;
            return Task.CompletedTask;
        };

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(EventDelay, EventDelay);

    Assert.AreEqual(documents.Length, addedCount);
}
/// <summary>
/// A very small AutoFlushInterval should push all documents to the
/// index without an explicit Flush.
/// </summary>
public async Task AutoFlushInterval_TinyInterval()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    BatchingSearchClient batchingClient = GetBatchingSearchClient(resources);
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        batchingClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = TimeSpan.FromMilliseconds(10)
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);

    // Give the tiny interval plenty of time to fire.
    await DelayAsync(TimeSpan.FromSeconds(5), EventDelay);
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// A mixed batch (delete + upload + merge-or-upload) against an empty
/// index leaves two documents behind.
/// </summary>
public async Task Convenience_None()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(3);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());
    AssertNoFailures(sender);

    IndexDocumentsBatch<SimpleDocument> batch = IndexDocumentsBatch.Create(
        IndexDocumentsAction.Delete<SimpleDocument>(documents[0]),
        IndexDocumentsAction.Upload<SimpleDocument>(documents[1]),
        IndexDocumentsAction.MergeOrUpload<SimpleDocument>(documents[2]));
    await sender.IndexDocumentsAsync(batch);
    await sender.FlushAsync();

    // Only the upload and merge-or-upload actions add documents.
    await WaitForDocumentCountAsync(resources.GetSearchClient(), 2);
}
/// <summary>
/// Sample test: indexes a 100,000-item catalog through a
/// SearchIndexingBufferedSender and verifies the final document count.
/// The #region blocks are sample-extraction anchors; keep their
/// contents intact.
/// </summary>
public async Task BufferedSender()
{
    await using SearchResources resources = SearchResources.CreateWithNoIndexes(this);
    SearchClient searchClient = null;
    try
    {
        searchClient = await CreateIndexAsync(resources);

        // Simple
        {
            searchClient = GetOriginal(searchClient);
            #region Snippet:Azure_Search_Documents_Tests_Samples_Sample05_IndexingDocuments_BufferedSender1
            await using SearchIndexingBufferedSender<Product> indexer =
                new SearchIndexingBufferedSender<Product>(searchClient);
            await indexer.UploadDocumentsAsync(GenerateCatalog(count: 100000));
            #endregion
        }
        await WaitForDocumentCountAsync(searchClient, 100000);

        // Check
        // NOTE(review): the `#if SNIPPET` flush below references `indexer`,
        // which is out of scope here — it compiles only because the code is
        // excluded outside snippet builds; presumably intentional for the
        // extracted sample.
        #region Snippet:Azure_Search_Documents_Tests_Samples_Sample05_IndexingDocuments_BufferedSender2
#if SNIPPET
        await indexer.FlushAsync();
#endif
        Assert.AreEqual(100000, (int)await searchClient.GetDocumentCountAsync());
        #endregion
    }
    finally
    {
        // Clean up the index even if the test fails.
        if (searchClient != null)
        {
            await resources.GetIndexClient().DeleteIndexAsync(searchClient.IndexName);
        }
    }
}
/// <summary>
/// Wire a handler that fails the current test if any indexing action
/// reports a failure, including the result and/or exception details in
/// the failure message.
/// </summary>
private static void AssertNoFailures<T>(SearchIndexingBufferedSender<T> indexer)
{
    indexer.ActionFailedAsync +=
        (IndexDocumentsAction<T> doc, IndexingResult result, Exception ex, CancellationToken cancellationToken) =>
        {
            StringBuilder message = new StringBuilder();
            if (result != null)
            {
                Assert.IsFalse(result.Succeeded);
                message.AppendLine($"key {result.Key} failed with {result.Status}: {result.ErrorMessage}");
            }
            // BUG FIX: the original checked `message != null`, which is
            // always true for a freshly constructed StringBuilder and
            // dereferenced `ex` even when no exception was provided.
            if (ex != null)
            {
                message.AppendLine(ex.ToString());
            }
            Assert.Fail(message.ToString());
            return Task.CompletedTask;
        };
}
/// <summary>
/// Documents stay buffered (count is zero) until the AutoFlushInterval
/// elapses, after which they appear in the index.
/// </summary>
public async Task Champion_FlushAfterInterval()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(20);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender<SimpleDocument>(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = TimeSpan.FromMilliseconds(100)
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);

    // Nothing should have flushed yet.
    Assert.Zero((int)await resources.GetSearchClient().GetDocumentCountAsync());

    // Wait out the interval; the flush should then fire automatically.
    await DelayAsync(TimeSpan.FromMilliseconds(100));
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// With a null or long AutoFlushInterval and a half-full batch, no
/// flush should occur within the observation window.
/// </summary>
public async Task AutoFlushInterval_DoesNotFire(int? interval)
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(BatchSize / 2);

    // Translate the test parameter into an optional interval.
    TimeSpan? flushInterval = interval != null
        ? (TimeSpan?)TimeSpan.FromMilliseconds(interval.Value)
        : null;
    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>()
            {
                AutoFlushInterval = flushInterval
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(TimeSpan.FromSeconds(3), EventDelay);

    // No documents should have been flushed to the index.
    Assert.Zero((int)await resources.GetSearchClient().GetDocumentCountAsync());
}
/// <summary>
/// With AutoFlush disabled, documents stay buffered until an explicit
/// FlushAsync pushes them to the index.
/// </summary>
public async Task Champion_ManualFlushing()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient searchClient = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(1000);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        searchClient.CreateIndexingBufferedSender<SimpleDocument>(
            new SearchIndexingBufferedSenderOptions<SimpleDocument> { AutoFlush = false });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);

    // Nothing reaches the index until the manual flush below.
    Assert.Zero((int)await resources.GetSearchClient().GetDocumentCountAsync());

    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Track in-flight actions: each added action is recorded (keyed by
/// its hash code) and removed again when it completes or fails.
/// </summary>
private static ConcurrentDictionary<int, IndexDocumentsAction<T>> TrackPending<T>(SearchIndexingBufferedSender<T> indexer)
{
    var inFlight = new ConcurrentDictionary<int, IndexDocumentsAction<T>>();

    // Record each action as it's added...
    indexer.ActionAddedAsync +=
        (IndexDocumentsAction<T> doc, CancellationToken cancellationToken) =>
        {
            inFlight[doc.GetHashCode()] = doc;
            return Task.CompletedTask;
        };

    // ...and forget it once it completes...
    indexer.ActionCompletedAsync +=
        (IndexDocumentsAction<T> doc, IndexingResult result, CancellationToken cancellationToken) =>
        {
            inFlight.TryRemove(doc.GetHashCode(), out _);
            return Task.CompletedTask;
        };

    // ...or fails.
    indexer.ActionFailedAsync +=
        (IndexDocumentsAction<T> doc, IndexingResult result, Exception ex, CancellationToken cancellationToken) =>
        {
            inFlight.TryRemove(doc.GetHashCode(), out _);
            return Task.CompletedTask;
        };

    return inFlight;
}