/// <summary>
/// Verifies that a user-supplied <c>KeyFieldAccessor</c> is invoked to
/// extract document keys instead of the default key discovery.
/// </summary>
public async Task KeyFieldAccessor_Custom()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(10);

    // Flips to true the first time our accessor runs.
    bool accessorCalled = false;
    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender<SimpleDocument>(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                KeyFieldAccessor = doc =>
                {
                    accessorCalled = true;
                    return doc.Id;
                }
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);

    Assert.IsTrue(accessorCalled);
}
/// <summary>
/// Verifies that disposing the sender blocks until every pending
/// document has been submitted to the service.
/// </summary>
public async Task Dispose_Blocks()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlush = false
            });
    AssertNoFailures(sender);
    ConcurrentDictionary<int, IndexDocumentsAction<SimpleDocument>> pending = TrackPending(sender);

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(EventDelay, EventDelay);

    // Nothing has been submitted yet because AutoFlush is off.
    Assert.AreEqual(documents.Length, pending.Count);

    // DisposeAsync must flush everything before returning.
    await ((IAsyncDisposable)sender).DisposeAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies that a partial batch is not submitted automatically when the
/// flush interval is disabled, but is sent by an explicit Flush.
/// </summary>
public async Task AutoFlush_PartialBatch()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(BatchSize / 2);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = null
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(TimeSpan.FromSeconds(5), EventDelay);

    // Half a batch is not enough to trigger an automatic submission.
    Assert.Zero((int)await resources.GetSearchClient().GetDocumentCountAsync());

    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies documents can keep being added while earlier ones are being
/// submitted in the background.
/// </summary>
public async Task Champion_ContinueAddingWhileSending()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(1000);

    // Wrap in a block so we DisposeAsync before getting the Count below
    {
        await using SearchIndexingBufferedSender<SimpleDocument> sender =
            client.CreateIndexingBufferedSender<SimpleDocument>();
        AssertNoFailures(sender);

        // Trickle the documents in one at a time
        foreach (SimpleDocument document in documents)
        {
            await sender.UploadDocumentsAsync(new[] { document });
            await DelayAsync(TimeSpan.FromMilliseconds(5));
        }
    }

    // Check that we have the correct number of documents
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies that full batches are submitted automatically even with the
/// flush interval disabled, leaving only the final partial batch for an
/// explicit Flush.
/// </summary>
public async Task AutoFlush_MultipleBatches()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 3.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = null
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(TimeSpan.FromSeconds(10), EventDelay);

    // Three complete batches should have gone out on their own.
    await WaitForDocumentCountAsync(
        resources.GetSearchClient(),
        3 * BatchSize,
        delay: TimeSpan.FromSeconds(5));

    // Check that we have the correct number of documents
    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies that a full batch is submitted immediately and the remainder
/// is picked up when the auto-flush interval fires.
/// </summary>
public async Task AutoFlushInterval_FullBatch()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    BatchingSearchClient client = GetBatchingSearchClient(resources);
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = TimeSpan.FromMilliseconds(500)
            });
    AssertNoFailures(sender);
    ConcurrentDictionary<int, IndexDocumentsAction<SimpleDocument>> pending = TrackPending(sender);

    // Wait for the first (full) batch to be submitted.
    Task<object> submitted = client.BatchSubmitted;
    await sender.UploadDocumentsAsync(documents);
    await submitted;
    await DelayAsync(EventDelay, EventDelay);

    // Only the partial remainder should still be pending.
    Assert.AreEqual(documents.Length - BatchSize, pending.Count);

    // The interval should eventually flush the rest.
    await DelayAsync(TimeSpan.FromSeconds(5), EventDelay);
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Create a hotels index with the standard test documents and as many
/// extra empty documents needed to test.
/// </summary>
/// <param name="size">The total number of documents in the index.</param>
/// <returns>SearchResources for testing.</returns>
public async Task<SearchResources> CreateLargeHotelsIndexAsync(int size)
{
    // Start with the standard test hotels
    SearchResources resources = await SearchResources.CreateWithHotelsIndexAsync(this);

    // Create empty hotels with just an ID for the rest.  Clamp the count
    // at zero so a size smaller than the standard documents doesn't make
    // Enumerable.Range throw ArgumentOutOfRangeException.
    int existingDocumentCount = SearchResources.TestDocuments.Length;
    List<SearchDocument> hotels =
        Enumerable.Range(existingDocumentCount + 1, Math.Max(0, size - existingDocumentCount))
        .Select(id => new SearchDocument { ["hotelId"] = id.ToString() })
        .ToList();

    // Upload the empty hotels in batches of 1000 until we're complete.
    // GetRange slices each batch in O(batch) instead of the original
    // O(n^2) Skip(i).Take(1000) rescan, and the loop condition already
    // guarantees each slice is non-empty.
    const int uploadBatchSize = 1000;
    SearchClient client = resources.GetSearchClient();
    for (int i = 0; i < hotels.Count; i += uploadBatchSize)
    {
        List<SearchDocument> nextHotels =
            hotels.GetRange(i, Math.Min(uploadBatchSize, hotels.Count - i));
        await client.IndexDocumentsAsync(IndexDocumentsBatch.Upload(nextHotels));
        await resources.WaitForIndexingAsync();
    }

    return resources;
}
/// <summary>
/// Verifies the sender can discover the key field by fetching the index
/// definition when the document type can't provide it directly.
/// </summary>
public async Task KeyFieldAccessor_FetchIndex()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    UnbuildableDocument[] documents = UnbuildableDocument.GetDocuments(10);

    await using SearchIndexingBufferedSender<UnbuildableDocument> sender =
        client.CreateIndexingBufferedSender<UnbuildableDocument>();
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Demonstrates basic checkpointing: tracking which actions are still
/// outstanding via the Added/Completed/Failed events.
/// </summary>
public async Task Champion_BasicCheckpointing()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(1000);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlush = true,
                AutoFlushInterval = null
            });

    // Track every action until the service confirms or rejects it.
    List<IndexDocumentsAction<SimpleDocument>> outstanding =
        new List<IndexDocumentsAction<SimpleDocument>>();
    sender.ActionAddedAsync +=
        (IndexDocumentsAction<SimpleDocument> doc, CancellationToken cancellationToken) =>
        {
            outstanding.Add(doc);
            return Task.CompletedTask;
        };
    sender.ActionCompletedAsync +=
        (IndexDocumentsAction<SimpleDocument> doc, IndexingResult result, CancellationToken cancellationToken) =>
        {
            outstanding.Remove(doc);
            return Task.CompletedTask;
        };
    sender.ActionFailedAsync +=
        (IndexDocumentsAction<SimpleDocument> doc, IndexingResult result, Exception ex, CancellationToken cancellationToken) =>
        {
            outstanding.Remove(doc);
            return Task.CompletedTask;
        };

    // 1001 actions total: 500 uploads, one bogus merge, 500 more uploads.
    await sender.UploadDocumentsAsync(documents.Take(500));
    await sender.MergeDocumentsAsync(new[] { new SimpleDocument { Id = "Fake" } });
    await sender.UploadDocumentsAsync(documents.Skip(500));
    await DelayAsync(TimeSpan.FromSeconds(5), TimeSpan.FromMilliseconds(250));

    // One full batch should have been auto-submitted by now.
    Assert.AreEqual(1001 - BatchSize, outstanding.Count);

    await sender.FlushAsync();
    Assert.AreEqual(0, outstanding.Count);
}
/// <summary>
/// Verifies the MergeOrUploadDocumentsAsync convenience method indexes
/// every document.
/// </summary>
public async Task Convenience_MergeOrUpload()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());
    AssertNoFailures(sender);

    await sender.MergeOrUploadDocumentsAsync(documents);
    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies that a single bad action surfaces through ActionFailedAsync
/// with its key and status while the other 1000 documents succeed.
/// </summary>
public async Task Champion_FineGrainedErrors()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(1000);

    // Don't touch the failures outside of the event handler until
    // we've finished flushing
    List<IndexingResult> failures = new List<IndexingResult>();
    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender<SimpleDocument>(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlush = false
            });
    sender.ActionFailedAsync +=
        (IndexDocumentsAction<SimpleDocument> doc, IndexingResult result, Exception ex, CancellationToken cancellationToken) =>
        {
            failures.Add(result);
            return Task.CompletedTask;
        };

    // Slip one merge against a nonexistent document into the middle.
    await sender.UploadDocumentsAsync(documents.Take(500));
    await sender.MergeDocumentsAsync(new[] { new SimpleDocument { Id = "Fake" } });
    await sender.UploadDocumentsAsync(documents.Skip(500));
    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), 1000);

    // Only the bogus merge failed, with a 404.
    Assert.AreEqual(1, failures.Count);
    Assert.AreEqual("Fake", failures[0].Key);
    Assert.AreEqual(404, failures[0].Status);
}
/// <summary>
/// Verifies a single large upload (50,000 documents) is fully indexed
/// once the sender is disposed.
/// </summary>
public async Task Champion_OneShotUpload()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(50000);

    // Wrap in a block so we DisposeAsync before getting the Count below
    {
        await using SearchIndexingBufferedSender<SimpleDocument> sender =
            client.CreateIndexingBufferedSender<SimpleDocument>();
        AssertNoFailures(sender);
        await sender.UploadDocumentsAsync(documents);
    }

    // Check that we have the correct number of documents
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies GetDocumentCountAsync reports the number of documents in the
/// shared hotels index with a 200 status.
/// </summary>
public async Task GetDocumentCount()
{
    await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
    SearchClient client = resources.GetSearchClient();

    Response<long> response = await client.GetDocumentCountAsync();

    Assert.AreEqual(200, response.GetRawResponse().Status);
    Assert.AreEqual(SearchResources.TestDocuments.Length, response.Value);
}
/// <summary>
/// Verifies a caller-supplied ClientRequestId is echoed back on the raw
/// response.
/// </summary>
public async Task ClientRequestIdRountrips()
{
    await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
    SearchClient client = resources.GetSearchClient();

    // Recording.Random keeps the GUID stable across test playback.
    Guid clientRequestId = Recording.Random.NewGuid();
    Response<long> response = await client.GetDocumentCountAsync(
        new SearchRequestOptions { ClientRequestId = clientRequestId });

    Assert.AreEqual(clientRequestId.ToString(), response.GetRawResponse().ClientRequestId);
}
/// <summary>
/// Verifies that DisposeAsync flushes any buffered documents even when
/// AutoFlush is disabled.
/// </summary>
public async Task Dispose_Flushes()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlush = false
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);

    // Disposing is what pushes the buffered documents to the service.
    await ((IAsyncDisposable)sender).DisposeAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies a mixed batch (Delete + Upload + MergeOrUpload) built via
/// IndexDocumentsBatch.Create indexes the expected two documents.
/// </summary>
public async Task Convenience_None()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(3);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());
    AssertNoFailures(sender);

    // Deleting a document that doesn't exist is a no-op, so only the
    // Upload and MergeOrUpload actions add documents.
    IndexDocumentsBatch<SimpleDocument> batch = IndexDocumentsBatch.Create(
        IndexDocumentsAction.Delete<SimpleDocument>(documents[0]),
        IndexDocumentsAction.Upload<SimpleDocument>(documents[1]),
        IndexDocumentsAction.MergeOrUpload<SimpleDocument>(documents[2]));
    await sender.IndexDocumentsAsync(batch);
    await sender.FlushAsync();

    await WaitForDocumentCountAsync(resources.GetSearchClient(), 2);
}
/// <summary>
/// Verifies a partial batch is submitted automatically once the
/// auto-flush interval elapses.
/// </summary>
public async Task Champion_FlushAfterInterval()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(20);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender<SimpleDocument>(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = TimeSpan.FromMilliseconds(100)
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);

    // Nothing indexed immediately; the interval hasn't fired yet.
    Assert.Zero((int)await resources.GetSearchClient().GetDocumentCountAsync());

    await DelayAsync(TimeSpan.FromMilliseconds(100));
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies that a disabled or not-yet-elapsed auto-flush interval does
/// not submit a partial batch.
/// </summary>
public async Task AutoFlushInterval_DoesNotFire(int?interval)
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(BatchSize / 2);

    // A null interval disables time-based flushing entirely.
    TimeSpan? flushInterval = interval.HasValue
        ? (TimeSpan?)TimeSpan.FromMilliseconds(interval.Value)
        : null;
    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = flushInterval
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(TimeSpan.FromSeconds(3), EventDelay);

    Assert.Zero((int)await resources.GetSearchClient().GetDocumentCountAsync());
}
/// <summary>
/// Verifies the current (broken) ClientRequestId behavior: the id is NOT
/// yet echoed back by the generated client.
/// </summary>
public async Task ClientRequestIdRountrips()
{
    await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
    SearchClient client = resources.GetSearchClient();

    // Recording.Random keeps the GUID stable across test playback.
    Guid clientRequestId = Recording.Random.NewGuid();
    Response<long> response = await client.GetDocumentCountAsync(
        new SearchRequestOptions { ClientRequestId = clientRequestId });

    // TODO: #10604 - C# generator doesn't properly support ClientRequestId yet
    // (Assertion is here to remind us to fix this when we do - just
    // change to AreEqual and re-record)
    Assert.AreNotEqual(clientRequestId.ToString(), response.GetRawResponse().ClientRequestId);
}
/// <summary>
/// Verifies a freshly indexed dynamic document can be retrieved by key
/// once indexing completes.
/// </summary>
public async Task RecentlyIndexedDynamicDocument()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyHotelsIndexAsync(this);
    Hotel document = SearchResources.TestDocuments[0];

    // Index the strongly-typed hotel as a dynamic SearchDocument.
    await resources.GetSearchClient().IndexDocumentsAsync(
        IndexDocumentsBatch.Upload(new[] { document.AsDocument() }));
    await resources.WaitForIndexingAsync();

    Response<Hotel> response =
        await resources.GetQueryClient().GetDocumentAsync<Hotel>(document.HotelId);
    Assert.AreEqual(document.HotelId, response.Value.HotelId);
}
/// <summary>
/// Verifies that with AutoFlush disabled nothing is indexed until an
/// explicit FlushAsync.
/// </summary>
public async Task Champion_ManualFlushing()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments(1000);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender<SimpleDocument>(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlush = false
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);

    // The buffer holds everything until we flush.
    Assert.Zero((int)await resources.GetSearchClient().GetDocumentCountAsync());

    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies MergeDocumentsAsync fails for every document when merging
/// into an empty index (merge requires the document to already exist).
/// </summary>
public async Task Convenience_Merge()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());
    ConcurrentQueue<IndexDocumentsAction<SimpleDocument>> failures = TrackFailures(sender);

    await sender.MergeDocumentsAsync(documents);
    await sender.FlushAsync();

    // None of the documents exist yet, so every merge should fail.
    Assert.AreEqual(documents.Length, failures.Count);
}
/// <summary>
/// Verifies the sender recovers when the service forces a batch to be
/// split and still indexes every document.
/// </summary>
public async Task Behavior_Split()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    BatchingSearchClient client = GetBatchingSearchClient(resources);
    SimpleDocument[] documents = SimpleDocument.GetDocuments(BatchSize);

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender<SimpleDocument>();
    AssertNoFailures(sender);

    // Force the test transport to split the next submission.
    client.SplitNextBatch = true;

    await sender.UploadDocumentsAsync(documents);
    await sender.FlushAsync();
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies that abandoning a sender without disposing it does not
/// crash the process (the sender is intentionally never disposed here).
/// </summary>
public async Task Dispose_UndisposedNoCrash()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    // Deliberately NOT `await using` — leaving the sender undisposed is
    // the scenario under test.
    SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlush = false
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
}
/// <summary>
/// Index a single document and verify it round-trips through
/// GetDocumentAsync, comparing against <paramref name="expected"/> when
/// provided or the original document otherwise.
/// </summary>
/// <param name="getKey">Extracts the document's key for lookup.</param>
/// <param name="document">The document to index.</param>
/// <param name="expected">Optional expected value to compare against.</param>
/// <param name="options">Optional GetDocument options.</param>
public async Task VerifyRoundtrip <T>(
    Func <T, string> getKey,
    T document,
    T expected = default,
    GetDocumentOptions options = null)
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyHotelsIndexAsync(this);

    await resources.GetSearchClient().IndexDocumentsAsync<T>(
        IndexDocumentsBatch.Upload<T>(new[] { document }));
    await resources.WaitForIndexingAsync();

    Response<T> response =
        await resources.GetQueryClient().GetDocumentAsync<T>(getKey(document), options);

    // Only validate expected properties
    AssertApproximate(expected ?? document, response.Value);
}
/// <summary>
/// Verifies that uploading documents whose key field cannot be resolved
/// fails with an <see cref="InvalidOperationException"/> that names the
/// offending document type.  The previous try/catch silently passed when
/// no exception was thrown; Assert.ThrowsAsync makes the expected
/// failure mandatory.
/// </summary>
public async Task KeyFieldAccessor_Error()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    Hotel[] data = SearchResources.TestDocuments;

    await using SearchIndexingBufferedSender<Hotel> indexer =
        client.CreateIndexingBufferedSender<Hotel>();
    AssertNoFailures(indexer);

    // Hotel documents can't provide a key against the SimpleDocument
    // index, so the upload must throw — and the message must point the
    // user at the Hotel type.
    InvalidOperationException ex = Assert.ThrowsAsync<InvalidOperationException>(
        async () => await indexer.UploadDocumentsAsync(data));
    StringAssert.Contains(nameof(Hotel), ex.Message);
}
/// <summary>
/// Verifies a struct document type round-trips through indexing and
/// retrieval with value equality.
/// </summary>
public async Task Structs()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyHotelsIndexAsync(this);
    SimpleStructHotel document = new SimpleStructHotel
    {
        HotelId = "4",
        HotelName = "Value Inn"
    };

    await resources.GetSearchClient().IndexDocumentsAsync(
        IndexDocumentsBatch.Upload(new[] { document }));
    await resources.WaitForIndexingAsync();

    SearchClient client = resources.GetQueryClient();
    Response<SimpleStructHotel> response =
        await client.GetDocumentAsync<SimpleStructHotel>(document.HotelId);

    // Struct equality compares every member.
    Assert.AreEqual(document, response.Value);
}
/// <summary>
/// Verifies ActionAddedAsync fires once per document added to the
/// sender.
/// </summary>
public async Task Notifications_Added()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    SearchClient client = resources.GetSearchClient();
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>());

    // Count every Added notification.
    int addedCount = 0;
    sender.ActionAddedAsync += (a, c) =>
    {
        addedCount++;
        return Task.CompletedTask;
    };

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(EventDelay, EventDelay);

    Assert.AreEqual(documents.Length, addedCount);
}
/// <summary>
/// Verifies an extremely small auto-flush interval still drains every
/// document to the service.
/// </summary>
public async Task AutoFlushInterval_TinyInterval()
{
    await using SearchResources resources =
        await SearchResources.CreateWithEmptyIndexAsync<SimpleDocument>(this);
    BatchingSearchClient client = GetBatchingSearchClient(resources);
    SimpleDocument[] documents = SimpleDocument.GetDocuments((int)(BatchSize * 1.5));

    await using SearchIndexingBufferedSender<SimpleDocument> sender =
        client.CreateIndexingBufferedSender(
            new SearchIndexingBufferedSenderOptions<SimpleDocument>
            {
                AutoFlushInterval = TimeSpan.FromMilliseconds(10)
            });
    AssertNoFailures(sender);

    await sender.UploadDocumentsAsync(documents);
    await DelayAsync(TimeSpan.FromSeconds(5), EventDelay);
    await WaitForDocumentCountAsync(resources.GetSearchClient(), documents.Length);
}
/// <summary>
/// Verifies a dynamic SearchDocument whose non-key fields are all null
/// or empty can be indexed and retrieved by key.
/// </summary>
public async Task EmptyValuesDynamicDocument()
{
    await using SearchResources resources = await SearchResources.CreateWithEmptyHotelsIndexAsync(this);

    // Build a hotel document where every field except the key is
    // explicitly null (or an empty collection for the tags arrays),
    // including a nested room document with the same treatment.
    SearchDocument document = new SearchDocument
    {
        ["hotelId"] = "1",
        ["hotelName"] = null,
        ["tags"] = new object[0],
        ["parkingIncluded"] = null,
        ["lastRenovationDate"] = null,
        ["rating"] = null,
        ["location"] = null,
        ["geoLocation"] = null,
        ["address"] = null,
        ["rooms"] = new[]
        {
            new SearchDocument
            {
                ["baseRate"] = null,
                ["bedOptions"] = null,
                ["sleepsCount"] = null,
                ["smokingAllowed"] = null,
                ["tags"] = new object[0]
            }
        }
    };

    // Upload the document and wait until the service has indexed it.
    await resources.GetSearchClient().IndexDocumentsAsync(
        IndexDocumentsBatch.Upload(new[] { document }));
    await resources.WaitForIndexingAsync();

    // Retrieve by key and confirm the key round-tripped.
    Response<SearchDocument> response =
        await resources.GetQueryClient().GetDocumentAsync<SearchDocument>((string)document["hotelId"]);
    Assert.AreEqual(document["hotelId"], response.Value["hotelId"]);
}