// Indexes a new document, verifies it round-trips by id, deletes it by
// resolving the id from the object itself, and verifies it is gone.
public void IndexThanDeleteDocumentByObject()
{
	// Arrange: a fresh index plus a document whose id sits clear of seeded test data.
	var indexName = IntegrationSetup.CreateNewIndexWithData(this.Client);
	var document = new ElasticsearchProject
	{
		Country = "Mozambique",
		Followers = new List<Person>(),
		Id = DateTime.Now.Millisecond + 1500, // offset to avoid colliding with existing test data
		Name = "Test Document for 'IndexDocument' test"
	};

	// Act: index the document and refresh so it is immediately retrievable.
	this.Client.Index(document, i => i.Refresh().Index(indexName));

	// Assert: fetching by id returns an equivalent document.
	var fetched = this.Client.Source<ElasticsearchProject>(document.Id, indexName);
	Assert.AreEqual(document.Followers.Count, fetched.Followers.Count);
	Assert.AreEqual(document.Id, fetched.Id);
	Assert.AreEqual(document.Name, fetched.Name);

	// Act: remove the document, letting the client infer the id from the object.
	this.Client.Delete<ElasticsearchProject>(f => f.IdFrom(document).Refresh().Index(indexName));

	// Assert: getting by id now yields nothing.
	fetched = this.Client.Source<ElasticsearchProject>(document.Id, indexName);
	Assert.Null(fetched);
}
// Finds a document via a fuzzy search, then fetches the same document directly
// by id and verifies both paths return the same data.
public void GetDocumentById()
{
	// Arrange: seed an index and take a known follower first name from the demo data.
	var indexName = IntegrationSetup.CreateNewIndexWithData(this.Client);
	var knownFirstName = NestTestData.Data.First().Followers.First().FirstName.ToLowerInvariant();

	// Locate a document using a fuzzy query (the trailing "x" misspells the name on purpose).
	var searchResponse = this.Client.Search<ElasticsearchProject>(s => s
		.Index(indexName)
		.Query(q => q
			.Fuzzy(fq => fq.OnField(p => p.Followers.First().FirstName).Value(knownFirstName + "x"))
		)
	);
	Assert.Greater(searchResponse.Total, 0);
	var firstHit = searchResponse.HitsMetaData.Hits.First();
	var expected = firstHit.Source;

	// Act: fetch the same document directly through the get-source API.
	var actual = this.Client.Source<ElasticsearchProject>(firstHit.Id, indexName);

	// Assert: the direct get returns the document the search found.
	Assert.AreEqual(expected.Country, actual.Country);
	Assert.AreEqual(expected.Followers.Count, actual.Followers.Count);
	Assert.AreEqual(expected.Id, actual.Id);
	Assert.AreEqual(expected.Name, actual.Name);
}
// Bulk-deletes every document returned by a match-all search and verifies the
// index total drops by exactly the number of deleted documents.
public void RemoveAllByPassingAsIEnumerableOfBulkParameters()
{
	// Arrange: fresh index; snapshot what a match-all currently returns.
	var indexName = IntegrationSetup.CreateNewIndexWithData(this.Client);
	var searchResponse = this.Client.Search<ElasticsearchProject>(s => s
		.Index(indexName)
		.MatchAll()
	);
	Assert.IsNotNull(searchResponse);
	Assert.IsNotNull(searchResponse.Documents);
	var fetchedCount = searchResponse.Documents.Count();
	Assert.Greater(fetchedCount, 0);
	var totalBeforeDelete = searchResponse.Total;

	// Act: bulk-delete each fetched document, pinning the internal version type per item.
	var bulkResponse = this.Client.Bulk(b => b
		.FixedPath(indexName)
		.DeleteMany(searchResponse.Documents, (p, o) => p.VersionType(VersionType.Internal))
		.Refresh()
	);

	// Assert: the bulk request succeeded and no individual item errored.
	Assert.True(bulkResponse.IsValid, bulkResponse.ConnectionStatus.ResponseRaw.Utf8String());
	Assert.False(bulkResponse.Errors, bulkResponse.ConnectionStatus.ResponseRaw.Utf8String());
	Assert.IsNotEmpty(bulkResponse.Items);

	// Assert: the total shrank by exactly the number of documents we deleted.
	searchResponse = this.Client.Search<ElasticsearchProject>(s => s
		.Index(indexName)
		.MatchAll()
	);
	Assert.IsNotNull(searchResponse);
	Assert.IsNotNull(searchResponse.Documents);
	Assert.AreEqual(searchResponse.Total, totalBeforeDelete - fetchedCount);
}
// Kicks off the configured client calls exactly once (guarded by CallOnce) and
// caches the lazily-built responses. With oneRandomCall=true only one of the
// four dispatch variants (fluent/initializer x sync/async) runs, chosen at
// random; otherwise all four run. Integration setup/teardown hooks fire only
// when integration tests are enabled for this run.
public void KickOffOnce(IElasticClient client, bool oneRandomCall = false) =>
	Responses = CallOnce(() => new LazyResponses(async () =>
	{
		// Optional per-run integration setup before any calls are made.
		if (TestClient.Configuration.RunIntegrationTests)
		{
			IntegrationSetup?.Invoke(client, CallUniqueValues);
			CalledSetup = true;
		}
		// NOTE(review): assumes Random.Number(0, 3) is inclusive on both ends so
		// every branch below is reachable — confirm against the helper's contract.
		var randomCall = Random.Number(0, 3);
		var dict = new Dictionary<ClientMethod, IResponse>();
		if (!oneRandomCall || randomCall == 0)
		{
			Call(client, dict, ClientMethod.Fluent, v => _fluent(v, client));
		}
		if (!oneRandomCall || randomCall == 1)
		{
			await CallAsync(client, dict, ClientMethod.FluentAsync, v => _fluentAsync(v, client));
		}
		if (!oneRandomCall || randomCall == 2)
		{
			Call(client, dict, ClientMethod.Initializer, v => _request(v, client));
		}
		if (!oneRandomCall || randomCall == 3)
		{
			await CallAsync(client, dict, ClientMethod.InitializerAsync, v => _requestAsync(v, client));
		}
		if (TestClient.Configuration.RunIntegrationTests)
		{
			// NOTE(review): the loop variable `v` is never used, so IntegrationTeardown
			// is invoked once per unique value with identical arguments. Looks like it
			// should either pass `v` or be a single call outside the loop — confirm intent.
			foreach (var v in CallUniqueValues.Values.SelectMany(d => d))
			{
				IntegrationTeardown?.Invoke(client, CallUniqueValues);
			}
			CalledTeardown = true;
		}
		return (dict);
	}));
// One-time fixture setup: builds a client against the configured test cluster,
// creates the default test index plus a "_clone" of it, and seeds the demo data.
// Any failure propagates to the test runner unchanged.
public void Setup()
{
	var client = new ElasticClient(
		// Uncomment to point at a local node instead of the configured cluster:
		//ElasticsearchConfiguration.Settings(hostOverride: new Uri("http://localhost:9200"))
		ElasticsearchConfiguration.Settings()
	);

	// Fix: the original wrapped these calls in `try { ... } catch (Exception) { throw; }`.
	// A catch that only rethrows is a no-op (and a bare `throw ex;` variant would even
	// reset the stack trace), so the useless try/catch has been removed; exceptions
	// still propagate exactly as before.
	IntegrationSetup.CreateTestIndex(client, ElasticsearchConfiguration.DefaultIndex);
	IntegrationSetup.CreateTestIndex(client, ElasticsearchConfiguration.DefaultIndex + "_clone");
	IntegrationSetup.IndexDemoData(client);
}
// Deletes documents matching a term query via delete-by-query scoped to one
// index, then verifies the match is gone while other documents survive.
public void RemoveAllByQueryOverIndices()
{
	// Arrange: fresh index; confirm the term query matches at least one document.
	var indexName = IntegrationSetup.CreateNewIndexWithData(this.Client);
	var searchResponse = this.Client.Search<ElasticsearchProject>(s => s
		.Index(indexName)
		.Query(q => q.Term(f => f.Name, "elasticsearch.pm"))
	);
	Assert.IsNotNull(searchResponse);
	Assert.IsNotNull(searchResponse.Documents);
	var matchedCount = searchResponse.Documents.Count();
	Assert.Greater(matchedCount, 0);
	var totalBefore = searchResponse.Total;

	// Act: delete-by-query, explicitly scoped to just this index.
	this.Client.DeleteByQuery<ElasticsearchProject>(d => d
		.Indices(new[] { indexName })
		.Query(q => q
			.Term(f => f.Name, "elasticsearch.pm")
		)
	);

	// Assert: the same query now matches nothing...
	searchResponse = this.Client.Search<ElasticsearchProject>(s => s
		.Index(indexName)
		.Query(q => q.Term(f => f.Name, "elasticsearch.pm"))
	);
	Assert.IsNotNull(searchResponse);
	Assert.IsNotNull(searchResponse.Documents);
	Assert.True(searchResponse.Total == 0);

	// ...but the rest of the index was left intact (we did not delete everything).
	var countResponse = this.Client.Count<ElasticsearchProject>(c => c
		.Index(indexName)
		.Query(q => q.MatchAll())
	);
	Assert.True(countResponse.IsValid);
	Assert.Greater(countResponse.Count, 0);
}
// Indexes a new document, verifies it round-trips, deletes it by explicit id,
// and verifies it can no longer be fetched.
public void IndexThanDeleteDocumentById()
{
	// Arrange: fresh index and a document with an id offset away from seeded data.
	var newIndex = IntegrationSetup.CreateNewIndexWithData(this.Client);
	var newDocument = new ElasticsearchProject
	{
		Country = "Mozambique",
		Followers = new List<Person>(),
		Id = DateTime.Now.Millisecond + 1500, // offset to avoid colliding with existing test data
		Name = "Test Document for 'IndexDocument' test"
	};

	// Act: index and refresh so the document is immediately retrievable.
	this.Client.Index<ElasticsearchProject>(newDocument, i => i.Index(newIndex).Refresh());

	// Assert: the stored document round-trips by id.
	var foundDocument = this.Client.Source<ElasticsearchProject>(newDocument.Id, newIndex);
	Assert.AreEqual(newDocument.Followers.Count, foundDocument.Followers.Count);
	Assert.AreEqual(newDocument.Id, foundDocument.Id);
	Assert.AreEqual(newDocument.Name, foundDocument.Name);

	// Act: delete by explicit id.
	var response = this.Client.Delete<ElasticsearchProject>(f => f.Id(newDocument.Id).Index(newIndex).Refresh());
	// Fix: the delete response was captured but never checked; assert it succeeded so
	// a failed delete is reported here instead of surfacing as a confusing Source() result.
	Assert.True(response.IsValid);

	// Assert: the document is gone.
	foundDocument = this.Client.Source<ElasticsearchProject>(newDocument.Id, newIndex);
	Assert.Null(foundDocument);
}
// Registers 10 percolators that term-match on document Name == indexName (each
// tagged with an "order" metadata value), indexes two matching projects, then
// issues one multi-percolate request containing four sub-requests — a by-document
// percolate, a by-id percolate, a metadata-filtered count percolate, and a
// deliberately broken percolate — and asserts each sub-response individually.
public void MultiPercolate_ReturnsExpectedResults()
{
	// Start fresh using a new unique index.
	var indexName = ElasticsearchConfiguration.NewUniqueIndexName();
	IntegrationSetup.CreateTestIndex(this.Client, indexName);
	// Register several percolators in the new index that do a term match on
	// document name == indexName. We associate "order" metadata with each
	// percolator so we can later filter the ones we want to execute.
	foreach (var i in Enumerable.Range(0, 10))
	{
		var registerPercolator = this.Client.RegisterPercolator(new RegisterPercolatorRequest(indexName, "my-percolator-" + i)
		{
			Query = new TermQuery
			{
				// Match against the not-analyzed "sort" multi-field of Name.
				Field = Property.Path<ElasticsearchProject>(p => p.Name.Suffix("sort")),
				Value = indexName
			},
			MetaData = new Dictionary<string, object> { { "order", i } }
		});
		registerPercolator.IsValid.Should().BeTrue();
	}
	// Set up 2 projects to index, both using indexName as their Name.
	var projects = Enumerable.Range(0, 2)
		.Select(i => new ElasticsearchProject { Id = 1337 + i, Name = indexName })
		.ToList();
	this.Client.IndexMany(projects, indexName);
	this.Client.Refresh(r => r.Index(indexName));
	// Now kick off multiple percolations in a single multi-percolate request.
	var multiPercolateResponse = this.Client.MultiPercolate(mp => mp
		// 1) providing the document inline in the percolate request
		.Percolate<ElasticsearchProject>(perc => perc
			.Index(indexName)
			.Document(projects.First())
		)
		// 2) referencing an existing (already indexed) document by id
		.Percolate<ElasticsearchProject>(perc => perc
			.Index(indexName)
			.Id(projects.Last().Id)
		)
		// 3) a count percolation restricted (via the "order" metadata) to only
		//    two of the 10 registered percolators
		.Count<ElasticsearchProject>(cp => cp
			.Id(projects.Last().Id)
			.Index(indexName)
			.Query(ff => ff.Term("order", 3) || ff.Term("order", 4)
			)
		)
		// 4) force a server-side error by providing a bogus type-name
		.Percolate<ElasticsearchProject>(perc => perc
			.Index(indexName)
			.Type("bogus!")
			.Id(projects.Last().Id)
		)
	);
	multiPercolateResponse.IsValid.Should().BeTrue();
	var percolateResponses = multiPercolateResponse.Responses.ToList();
	// One response per sub-request, in request order.
	percolateResponses.Should().NotBeEmpty().And.HaveCount(4);
	// 1) inline document: all 10 percolators match.
	var percolateResponse = percolateResponses[0];
	percolateResponse.Total.Should().Be(10);
	percolateResponse.Matches.Should().HaveCount(10);
	// 2) existing document by id: same 10 matches.
	var existingResponse = percolateResponses[1];
	existingResponse.Total.Should().Be(10);
	existingResponse.Matches.Should().HaveCount(10);
	// 3) count-only response: total of 2, no match details returned.
	var countResponse = percolateResponses[2];
	countResponse.Total.Should().Be(2);
	countResponse.Matches.Should().BeNull();
	// 4) bogus type: no matches and a populated server error.
	var errorResponse = percolateResponses[3];
	errorResponse.Total.Should().Be(0);
	errorResponse.ServerError.Error.Should().NotBeNullOrWhiteSpace();
}
// Restores a clean test state: tears down existing indices, then re-runs setup.
// Order matters — TearDown must complete before Setup re-creates/seeds data.
private void ResetIndexes()
{
	IntegrationSetup.TearDown();
	IntegrationSetup.Setup();
}
// Restores a clean test state: tears down existing indices, then re-runs setup.
// Order matters — TearDown must complete before Setup re-creates/seeds data.
protected override void ResetIndexes()
{
	IntegrationSetup.TearDown();
	IntegrationSetup.Setup();
}
// Verifies that percolation returns highlight fragments for each match, both
// for a direct Percolate call and for the identical request issued through the
// multi-percolate API.
public void PercolateHighlighting()
{
	// Start fresh using a new unique index.
	var indexName = ElasticsearchConfiguration.NewUniqueIndexName();
	IntegrationSetup.CreateTestIndex(this.Client, indexName);
	var property = Property.Path<ElasticsearchProject>(p => p.Content);
	// Register 10 percolators alternating between two match queries on Content.
	// The document percolated below contains all of these terms, so all 10
	// percolators are expected to match.
	foreach (var i in Enumerable.Range(0, 10))
	{
		var registerPercolator = this.Client.RegisterPercolator(new RegisterPercolatorRequest(indexName, "high-percolator-" + i)
		{
			Query = new MatchQuery
			{
				Field = property,
				Query = i % 2 == 0 ? "brown dog" : "lazy dog"
			}
		});
		registerPercolator.IsValid.Should().BeTrue();
	}
	var content = "The quick brown fox jumps over the lazy dog";
	// Percolate an inline document and request highlighting on Content.
	var percolateResponse = this.Client.Percolate<ElasticsearchProject>(perc => perc
		.Index(indexName)
		.Document(new ElasticsearchProject { Content = content })
		.Highlight(10, h => h
			.OnFields(fd => fd
				.OnField(p => p.Content)
			)
		)
	);
	percolateResponse.Total.Should().Be(10);
	percolateResponse.Matches.Should().HaveCount(10);
	// Resolve the serialized field name for Content through the client's inferrer,
	// since highlight results are keyed by the wire-level field name.
	var field = percolateResponse.Infer.PropertyPath(property);
	foreach (var match in percolateResponse.Matches)
	{
		// Every match carries non-empty highlight fragments for the Content field.
		match.Highlight.Should().NotBeEmpty().And.ContainKey(field);
		match.Highlight[field].Should().NotBeEmpty()
			.And.OnlyContain(l => l.Any());
	}
	// The same request via multi-percolate should behave identically.
	var multiPercolateResponse = this.Client.MultiPercolate(mp => mp
		.Percolate<ElasticsearchProject>(perc => perc
			.Index(indexName)
			.Document(new ElasticsearchProject { Content = content })
			.Highlight(10, h => h
				.OnFields(fd => fd
					.OnField(p => p.Content)
				)
			)
		)
	);
	multiPercolateResponse.IsValid.Should().BeTrue();
	multiPercolateResponse.Responses.Should().NotBeEmpty().And.HaveCount(1);
	percolateResponse = multiPercolateResponse.Responses.First();
	percolateResponse.Total.Should().Be(10);
	percolateResponse.Matches.Should().HaveCount(10);
	foreach (var match in percolateResponse.Matches)
	{
		match.Highlight.Should().NotBeEmpty().And.ContainKey(field);
		match.Highlight[field].Should().NotBeEmpty()
			.And.OnlyContain(l => l.Any());
	}
}