public void TestIndexTimeout() { var timeout = 1; var s = new ConnectionSettings(Test.Default.Host, Test.Default.Port, timeout) .SetDefaultIndex(Test.Default.DefaultIndex) .SetMaximumAsyncConnections(Test.Default.MaximumAsyncConnections) .UsePrettyResponses(); var client = new ElasticClient(s); var newProject = new ElasticSearchProject { Name = "COBOLES", //COBOL ES client ? }; var t = client.IndexAsync<ElasticSearchProject>(newProject); t.Wait(1000); var r = t.Result; Assert.True(r.IsValid); Assert.IsNotNullOrEmpty(r.Id); var cs = r.ConnectionStatus; Assert.False(cs.Success); Assert.NotNull(cs.Error); Assert.NotNull(cs.Error.OriginalException); Trace.WriteLine(cs.Error.OriginalException); Assert.IsNotNullOrEmpty(cs.Error.ExceptionMessage); Assert.IsTrue(cs.Error.OriginalException is WebException); var we = cs.Error.OriginalException as WebException; Assert.IsTrue(cs.Error.ExceptionMessage.Contains("The request was canceled")); Assert.IsTrue(we.Status == WebExceptionStatus.RequestCanceled); Assert.True(t.IsCompleted, "task did not complete"); Assert.False(t.IsFaulted, "task was faulted, which means the exception did not cleanly pass to ConnectionStatus"); }
public void ShouldFailoverOnThriftConnectionsUsingSniff() { var uris = new [] { new Uri("http://INVALID_HOST"), new Uri("http://INVALID_HOST2"), new Uri("http://localhost:9500"), }; var connectionPool = new SniffingConnectionPool(uris, randomizeOnStartup: false); var settings = new ConnectionSettings(connectionPool, ElasticsearchConfiguration.DefaultIndex) .SniffOnStartup() .ExposeRawResponse() .SetTimeout(2000); var client = new ElasticClient(settings, new ThriftConnection(settings)); var results = client.Search<dynamic>(s => s.MatchAll()); results.IsValid.Should().BeTrue("{0}", results.ConnectionStatus.ToString()); results.ConnectionStatus.NumberOfRetries.Should().Be(0); var u = new Uri(results.ConnectionStatus.RequestUrl); u.Port.Should().Be(9500); results = client.Search<dynamic>(s => s.MatchAll()); results.IsValid.Should().BeTrue("{0}", results.ConnectionStatus.ToString()); results.ConnectionStatus.NumberOfRetries.Should().Be(0); u = new Uri(results.ConnectionStatus.RequestUrl); u.Port.Should().Be(9501); }
public void TestIntLookup() { var client = new ElasticClient(new ConnectionSettings(new Uri("http://localhost:9200"))); var expected = 12; var id = new IdResolver().GetIdFor(new IntIdClass { Id = expected }); StringAssert.AreEqualIgnoringCase(expected.ToString(), id); }
public void TestAlternateIdLookup() { var client = new ElasticClient(new ConnectionSettings(new Uri("http://localhost:9200"))); var expectedGuid = Guid.NewGuid(); var id = new IdResolver().GetIdFor(new AlternateIdClass { Guid = expectedGuid }); StringAssert.AreEqualIgnoringCase(expectedGuid.ToString(), id); }
public async void SniffOnStartShouldOnlyHit9200_WithoutPing_Async() { var seeds = new[] { ElasticsearchConfiguration.CreateBaseUri(9202), ElasticsearchConfiguration.CreateBaseUri(9201), ElasticsearchConfiguration.CreateBaseUri(9200) }; var sniffingConnectionPool = new SniffingConnectionPool(seeds, randomizeOnStartup: false); var connectionSettings = new ConnectionSettings(sniffingConnectionPool) .SniffOnStartup(); var client = new ElasticClient(connectionSettings); var rootNode = await client.RootNodeInfoAsync(); var metrics = rootNode.ConnectionStatus.Metrics; //When the connectionpool is used for the first time the sniff call should already //know only 9200 is on and live, no need to ping metrics.Requests.Count.Should().Be(1); metrics.Requests[0].Node.Port.Should().Be(9200); metrics.Requests[0].RequestType.Should().Be(RequestType.ElasticsearchCall); for (var i = 0; i < 3; i++) { rootNode = await client.RootNodeInfoAsync(); metrics = rootNode.ConnectionStatus.Metrics; metrics.Requests.Count.Should().Be(1); metrics.Requests[0].Node.Port.Should().Be(9200); metrics.Requests[0].RequestType.Should().Be(RequestType.ElasticsearchCall); } }
public static void SearchResult(SearchResultActionArgs args) { try { BaseAction.Setup(); var status = GetValidConnectionStatus(); var settings = BaseAction.Settings(); var client = new ElasticClient(settings, new InMemoryConnection(settings, status)); for (var i = 0; i < 100; i++) { var result = client.Search<Person>(s => s .Size(2000) .Index(BaseAction.DefaultIndex) //.Types(typeof (Person), typeof (ElasticSearchProject)) .MatchAll() ); result.Documents.Count(); } } finally { BaseAction.TearDown(); } }
public void WhenPostExceedsHttpLimit_DoNotRetry_UsingConnectionPooling() { var pool = new StaticConnectionPool(new [] { new Uri("http://localhost:9200"), new Uri("http://127.0.0.1:9200"), }); var settings = new ConnectionSettings(pool); var client = new ElasticClient(settings); var index = ElasticsearchConfiguration.NewUniqueIndexName(); var projects = NestTestData.Data; var people = NestTestData.People; var boolTerms = NestTestData.BoolTerms; var bulk = client.Bulk(b => b .FixedPath(index) .IndexMany(projects) .IndexMany(people) .IndexMany(boolTerms) ); bulk.IsValid.Should().BeFalse(); bulk.ConnectionStatus.NumberOfRetries.Should().Be(0); }
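When a single bulk call can exceed the HTTP request size limit, the usual workaround is to send several smaller bulk requests rather than relying on retries. A minimal sketch under that assumption, reusing the NEST 1.x calls shown above (the helper name and batch size are illustrative, not part of the test; assumes the usual System, System.Collections.Generic and System.Linq usings):
public static void IndexInBatches<T>(IElasticClient client, string index, IList<T> documents, int batchSize = 1000) where T : class
{
    // send the documents in fixed-size slices so no single request grows past the HTTP limit
    for (var i = 0; i < documents.Count; i += batchSize)
    {
        var batch = documents.Skip(i).Take(batchSize).ToList();
        var response = client.Bulk(b => b
            .FixedPath(index)    // same fixed index path as in the test above
            .IndexMany(batch));
        if (!response.IsValid)
            throw new Exception("Bulk batch starting at offset " + i + " failed: " + response.ConnectionStatus);
    }
}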
public void FailoverShouldOnlyPingDeadNodes() { var seeds = new[] { ElasticsearchConfiguration.CreateBaseUri(9202), ElasticsearchConfiguration.CreateBaseUri(9201), ElasticsearchConfiguration.CreateBaseUri(9200) }; var sniffingConnectionPool = new SniffingConnectionPool(seeds, randomizeOnStartup: false); var connectionSettings = new ConnectionSettings(sniffingConnectionPool); var client = new ElasticClient(connectionSettings); var rootNode = client.RootNodeInfo(); var metrics = rootNode.ConnectionStatus.Metrics; //ping 9202 + 9201 + 9200 and call 9200 metrics.Requests.Count.Should().Be(4); metrics.Requests[0].Node.Port.Should().Be(9202); metrics.Requests[0].RequestType.Should().Be(RequestType.Ping); metrics.Requests[0].EllapsedMilliseconds.Should().BeLessOrEqualTo(1100); metrics.Requests[1].Node.Port.Should().Be(9201); metrics.Requests[1].RequestType.Should().Be(RequestType.Ping); metrics.Requests[1].EllapsedMilliseconds.Should().BeLessOrEqualTo(1100); metrics.Requests[2].Node.Port.Should().Be(9200); metrics.Requests[2].RequestType.Should().Be(RequestType.Ping); metrics.Requests[2].EllapsedMilliseconds.Should().BeLessOrEqualTo(1100); metrics.Requests[3].Node.Port.Should().Be(9200); metrics.Requests[3].RequestType.Should().Be(RequestType.ElasticsearchCall); metrics.Requests[3].EllapsedMilliseconds.Should().BeLessOrEqualTo(1100); rootNode = client.RootNodeInfo(); metrics = rootNode.ConnectionStatus.Metrics; metrics.Requests.Count.Should().Be(1); metrics.Requests[0].Node.Port.Should().Be(9200); metrics.Requests[0].RequestType.Should().Be(RequestType.ElasticsearchCall); rootNode = client.RootNodeInfo(); metrics = rootNode.ConnectionStatus.Metrics; metrics.Requests.Count.Should().Be(1); metrics.Requests[0].Node.Port.Should().Be(9200); metrics.Requests[0].RequestType.Should().Be(RequestType.ElasticsearchCall); rootNode = client.RootNodeInfo(); metrics = rootNode.ConnectionStatus.Metrics; metrics.Requests.Count.Should().Be(1); metrics.Requests[0].Node.Port.Should().Be(9200); metrics.Requests[0].RequestType.Should().Be(RequestType.ElasticsearchCall); rootNode = client.RootNodeInfo(); metrics = rootNode.ConnectionStatus.Metrics; metrics.Requests.Count.Should().Be(1); metrics.Requests[0].Node.Port.Should().Be(9200); metrics.Requests[0].RequestType.Should().Be(RequestType.ElasticsearchCall); }
public void ConnectionException_WithThrowingClient_Async() { var uri = ElasticsearchConfiguration.CreateBaseUri(9494); var client = new ElasticClient(new ConnectionSettings(uri) .SetTimeout(500) .ThrowOnElasticsearchServerExceptions()); Assert.Throws<WebException>(async () => await client.RootNodeInfoAsync()); }
public void TestInheritedLookup() { var client = new ElasticClient(new ConnectionSettings(new Uri("http://localhost:9200"))); var expected = new InheritedIdClass() { Id = 123 }; var id = new IdResolver().GetIdFor(expected); id = new IdResolver().GetIdFor(expected); Assert.AreEqual(expected.Id.ToString(), id); }
public void DeserializeOfStreamDoesNotHoldACopyOfTheResponse() { var uri = ElasticsearchConfiguration.CreateBaseUri(); var settings = new ConnectionSettings(uri, ElasticsearchConfiguration.DefaultIndex); IElasticClient client = new ElasticClient(settings); var results = client.Search<ElasticsearchProject>(s => s.MatchAll()); }
public void IdPropertyNotMapped_IdIsInferred() { var settings = new ConnectionSettings(); var client = new ElasticClient(settings, connection: new InMemoryConnection()); var project = new ElasticsearchProject { Id = 123 }; Assert.AreEqual(project.Id.ToString(), client.Infer.Id<ElasticsearchProject>(project)); }
public void NoSearchResults() { //test teardown will delete defaultindex_* indices //process id makes it so we can run these tests concurrently using NCrunch var index = ElasticsearchConfiguration.DefaultIndex + "_posts_" + Process.GetCurrentProcess().Id.ToString(); var client = new ElasticClient(this._settings, new InMemoryConnection(this._settings)); var streamId = new Guid("8d00cf65-bf84-4035-9adb-695b1366304c"); var approved = true; var response = client.Count<MediaStreamEntry>(c=>c .Index("StreamEntry") .Query(x => x.Bool( b => b.Must ( x.Term(f => f.StreamId, streamId.ToString()) , x.Term(f => f.ApprovalSettings.Approved, approved) ) ) ) ); //ApprovalSettings should not appear twice just because we are spawning the nested queries off the wrong lambda parameter (x) Assert.AreEqual(1, Regex.Matches(response.ConnectionStatus.Request, @"approvalSettings\.approved").Count, response.ConnectionStatus.Request); //either use the lambda overload response = client.Count<MediaStreamEntry>(c=>c .Index("StreamEntry") .Query(x => x.Bool( b => b.Must ( m=> m.Term(f => f.StreamId, streamId.ToString()) , m => m.Term(f => f.ApprovalSettings.Approved, approved) ) ) ) ); //now we only see the query once Assert.AreEqual(1, Regex.Matches(response.ConnectionStatus.Request, @"approvalSettings\.approved").Count); //or use the static Query<MediaStreamEntry> response = client.Count<MediaStreamEntry>(c=>c .Index("StreamEntry") .Query(x => x.Bool( b => b.Must ( Query<MediaStreamEntry>.Term(f => f.StreamId, streamId) , Query<MediaStreamEntry>.Term(f => f.ApprovalSettings.Approved, approved) ) ) ) ); //now we still only see the query once Assert.AreEqual(1, Regex.Matches(response.ConnectionStatus.Request, @"approvalSettings\.approved").Count); }
public void TestConnectSuccessWithUri() { var settings = new ConnectionSettings(ElasticsearchConfiguration.CreateBaseUri(), "index"); var client = new ElasticClient(settings); var result = client.RootNodeInfo(); Assert.True(result.IsValid); Assert.NotNull(result.ConnectionStatus.HttpStatusCode); }
public void EmptySearch_NoDefaultIndex_DoesNotThrow() { Assert.DoesNotThrow(()=> { var client = new ElasticClient(); client.Search<dynamic>(new SearchRequest { }); }); }
public void TestUsesDefaultPropertyNameResolver() { var settings = new ConnectionSettings(UnitTestDefaults.Uri, UnitTestDefaults.DefaultIndex) .SetDefaultPropertyNameInferrer(p => p); var client = new ElasticClient(settings); Expression<Func<UserItemData, object>> exp = (m) => m.UserLabels; var propertyName = client.Infer.PropertyPath(exp); Assert.AreEqual("UserLabels", propertyName); }
public void IndexExistShouldNotThrowOn404() { var host = Test.Default.Host; if (Process.GetProcessesByName("fiddler").Any()) host = "ipv4.fiddler"; var connectionPool = new SniffingConnectionPool(new[] { new Uri("http://{0}:9200".F(host)) }); var settings = new ConnectionSettings(connectionPool, ElasticsearchConfiguration.DefaultIndex) .SniffOnStartup(); var client = new ElasticClient(settings); }
public static void Setup() { var client = new ElasticClient(ElasticsearchConfiguration.Settings(hostOverride: new Uri("http://localhost:9200"))); //uncomment the next line if you want to see the setup in fiddler too //var client = ElasticsearchConfiguration.Client; var projects = NestTestData.Data; var people = NestTestData.People; var boolTerms = NestTestData.BoolTerms; try { var createIndexResult = client.CreateIndex(ElasticsearchConfiguration.DefaultIndex, c => c .NumberOfReplicas(0) .NumberOfShards(1) .AddMapping<ElasticsearchProject>(m => m .MapFromAttributes() .Properties(p => p .String(s => s.Name(ep => ep.Content).TermVector(TermVectorOption.WithPositionsOffsetsPayloads)))) .AddMapping<Person>(m => m.MapFromAttributes()) .AddMapping<BoolTerm>(m => m.Properties(pp => pp .String(sm => sm.Name(p => p.Name1).Index(FieldIndexOption.NotAnalyzed)) .String(sm => sm.Name(p => p.Name2).Index(FieldIndexOption.NotAnalyzed)) )) ); var createAnotherIndexResult = client.CreateIndex(ElasticsearchConfiguration.DefaultIndex + "_clone", c => c .NumberOfReplicas(0) .NumberOfShards(1) .AddMapping<ElasticsearchProject>(m => m .MapFromAttributes() .Properties(p => p .String(s => s.Name(ep => ep.Content).TermVector(TermVectorOption.WithPositionsOffsetsPayloads)))) .AddMapping<Person>(m => m.MapFromAttributes()) .AddMapping<BoolTerm>(m => m.Properties(pp => pp .String(sm => sm.Name(p => p.Name1).Index(FieldIndexOption.NotAnalyzed)) .String(sm => sm.Name(p => p.Name2).Index(FieldIndexOption.NotAnalyzed)) )) ); var bulkResponse = client.Bulk(b => b .IndexMany(projects) .IndexMany(people) .IndexMany(boolTerms) .Refresh() ); } catch (Exception e) { throw; } }
public CustomConvertersTests() { var settings = new ConnectionSettings(new Uri("http://localhost:9200")) .SetDefaultIndex("nest_test_data") .AddContractJsonConverters( t => typeof(Enum).IsAssignableFrom(t) ? new Newtonsoft.Json.Converters.StringEnumConverter() : null, t => typeof(NestedObject).IsAssignableFrom(t) ? new CustomConverter() : null ); _serializationClient = new ElasticClient(settings); }
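The CustomConverter type registered above is not shown here; a minimal Json.NET converter skeleton of the shape AddContractJsonConverters expects could look like this (a hypothetical implementation, only the overridden members matter):
public class CustomConverter : Newtonsoft.Json.JsonConverter
{
    public override bool CanConvert(Type objectType)
    {
        // only handle the NestedObject type wired up in the settings above
        return typeof(NestedObject).IsAssignableFrom(objectType);
    }

    public override void WriteJson(Newtonsoft.Json.JsonWriter writer, object value, Newtonsoft.Json.JsonSerializer serializer)
    {
        // hypothetical: write the object out as a single string
        writer.WriteValue(value.ToString());
    }

    public override object ReadJson(Newtonsoft.Json.JsonReader reader, Type objectType, object existingValue, Newtonsoft.Json.JsonSerializer serializer)
    {
        // hypothetical: rebuild the object from the raw token as needed
        return new NestedObject();
    }
}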
public void MapNumericIdProperty() { var settings = new ConnectionSettings() .MapIdPropertyFor<ElasticsearchProject>(p => p.LongValue); var client = new ElasticClient(settings, connection: new InMemoryConnection()); var project = new ElasticsearchProject { LongValue = 123 }; Assert.AreEqual(project.LongValue.ToString(), client.Infer.Id<ElasticsearchProject>(project)); }
public void MapStringIdProperty() { var settings = new ConnectionSettings() .MapIdPropertyFor<ElasticsearchProject>(p => p.Name); var client = new ElasticClient(settings, connection: new InMemoryConnection()); var project = new ElasticsearchProject { Name = "foo" }; Assert.AreEqual(project.Name, client.Infer.Id<ElasticsearchProject>(project)); }
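The same mapping works for other property types; a sketch that maps a Guid property, reusing the AlternateIdClass from the id resolver test above (assuming its Guid property, as in TestAlternateIdLookup):
public void MapGuidIdProperty()
{
    var settings = new ConnectionSettings()
        .MapIdPropertyFor<AlternateIdClass>(p => p.Guid);
    var client = new ElasticClient(settings, connection: new InMemoryConnection());
    var doc = new AlternateIdClass { Guid = Guid.NewGuid() };
    // the inferred id should be the string form of the Guid
    StringAssert.AreEqualIgnoringCase(doc.Guid.ToString(), client.Infer.Id<AlternateIdClass>(doc));
}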
public void Execute() { Console.WriteLine("CLIENT SAMPLE"); var client = new ElasticClient<Tweet>(defaultHost: "localhost", defaultPort: 9200); var firstTweet = new Tweet { User = "******", Message = "trying out Elastic Search" }; var anotherTweet = new Tweet { User = "******", Message = "one more message" }; IndexResult indexResult1 = client.Index(new IndexCommand(index: "twitter", type: "tweet", id: "1").Refresh(), firstTweet); IndexResult indexResult2 = client.Index(Commands.Index(index: "twitter", type: "tweet", id: "2").Refresh(), anotherTweet); GetResult<Tweet> getResult = client.Get(new GetCommand(index: "twitter", type: "tweet", id: "2")); SearchResult<Tweet> searchResult = client.Search(new SearchCommand("twitter", "tweet"), new QueryBuilder<Tweet>() .Query(q => q .Term(t => t .Field(tweet => tweet.User) .Value("testUser") .Boost(2) ) )); DeleteResult deleteResult = client.Delete(Commands.Delete(index: "twitter")); PrintIndexResult(indexResult1); PrintIndexResult(indexResult2); PrintGetResult(getResult); PrintSearchResults(searchResult); PrintDeleteResult(deleteResult); Console.WriteLine("Press any key"); Console.ReadKey(); }
private string SerializeUsing(Flight flight, DateTimeZoneHandling handling) { var settings = new ConnectionSettings() .SetDefaultPropertyNameInferrer(p=>p) .SetJsonSerializerSettingsModifier(s => { s.DateFormatHandling = DateFormatHandling.IsoDateFormat; s.DateTimeZoneHandling = handling; }); var client = new ElasticClient(settings); return client.Serializer.Serialize(flight).Utf8String(); }
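A usage sketch for the helper above, comparing the two DateTimeZoneHandling modes from inside the same test class (the Flight property name is an assumption):
var flight = new Flight { DepartureDate = new DateTime(2015, 1, 1, 10, 0, 0, DateTimeKind.Utc) }; // DepartureDate is an assumed property
var utcJson = SerializeUsing(flight, DateTimeZoneHandling.Utc);
var localJson = SerializeUsing(flight, DateTimeZoneHandling.Local);
// the two strings should differ only in how the time zone suffix of the date is written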
public void SearchWithTypesAndIndicesSetShouldNotThrow_WithNoDefaultIndex() { Assert.DoesNotThrow(()=> { var client = new ElasticClient(); client.Search<dynamic>(new SearchRequest { Indices = new IndexNameMarker[] { "index" }, Types = new TypeNameMarker[] { "type" } }); }); }
public void CanCreateConnectionWithCustomQueryStringParameters() { // Arrange var uri = new Uri("http://localhost"); var query = new NameValueCollection { { "authToken", "ABCDEFGHIJK" } }; var connectionSettings = new ConnectionSettings(uri, "index").SetGlobalQueryStringParameters(query); var client = new ElasticClient(connectionSettings, new InMemoryConnection(connectionSettings)); var result = client.RootNodeInfo(); // Assert Assert.AreEqual(result.ConnectionStatus.RequestUrl, "http://localhost/?authToken=ABCDEFGHIJK"); }
public void CanCreateConnectionWithPathAndCustomQueryStringParameters() { // Arrange var uri = new Uri("http://localhost:9000"); var query = new NameValueCollection { { "authToken", "ABCDEFGHIJK" } }; var connectionSettings = new ConnectionSettings(uri, "index").SetGlobalQueryStringParameters(query); var client = new ElasticClient(connectionSettings, new TestConnection(connectionSettings)); var result = client.IndexExists(ie=>ie.Index("index")); // Assert Assert.AreEqual(result.ConnectionStatus.RequestUrl, "http://localhost:9000/index?authToken=ABCDEFGHIJK"); }
public void Calling_Refresh_UsingHttpClientConnection_DoesNotThrow() { var settings = ElasticsearchConfiguration.Settings() .EnableCompressedResponses(true); var connection = new HttpClientConnection(settings); var client = new ElasticClient(settings, connection: connection); Assert.DoesNotThrow(()=> client.Refresh()); Assert.DoesNotThrow(()=> client.Get<ElasticsearchProject>(NestTestData.Data.First().Id)); Assert.DoesNotThrow(()=> client.Ping()); }
public static void Search(SearchActionArgs args) { var settings = BaseAction.Settings(); var client = new ElasticClient(settings, new InMemoryConnection(settings)); for (var i = 0; i < 10000; i++) { var term = i.ToString(); var result = client.Search<ElasticSearchProject>(s => s .Query(q=> q.Term(p=>p.Name, term) || q.Term(p=>p.Followers.First().FirstName, term) ) ); } }
public void ServerError_Is_Set_ClientThat_DoesNotThrow_AndDoesNotExposeRawResponse() { var uri = ElasticsearchConfiguration.CreateBaseUri(); var client = new ElasticClient(new ConnectionSettings(uri).ExposeRawResponse(false)); Assert.DoesNotThrow(() => { var result = client.Search<ElasticsearchProject>(s => s.QueryRaw(@"{ ""badjson"": {} }")); result.IsValid.Should().BeFalse(); result.ConnectionStatus.HttpStatusCode.Should().Be(400); var e = result.ServerError; e.Should().NotBeNull(); e.ExceptionType.Should().Contain("SearchPhaseExecutionException"); }); }
public void UsePrettyResponsesShouldSurviveUrlModifications() { var settings = new ConnectionSettings(UnitTestDefaults.Uri, UnitTestDefaults.DefaultIndex) .UsePrettyResponses(); var connection = new InMemoryConnection(settings); var client = new ElasticClient(settings, connection); var r = client.ClusterHealth(h=>h.Level(LevelOptions.Cluster)); var u = new Uri(r.ConnectionStatus.RequestUrl); u.AbsolutePath.Should().StartWith("/_cluster/health"); u.Query.Should().Contain("level=cluster"); u.Query.Should().Contain("pretty=true"); }
static void Main(string[] args) { var local = new Uri("http://localhost:9200"); string indexName = "blog_post_author_index"; var settings = new ConnectionSettings(local).DefaultIndex(indexName); var elastic = new ElasticClient(settings); var res = elastic.ClusterHealth(); Console.WriteLine(res.Status); var blogPost = new BlogPost { Id = Guid.NewGuid(), Title = "First blog post", Body = "This is very long blog post!" }; if (!elastic.IndexExists(indexName).Exists) { var createIndexResponse = elastic.CreateIndex(indexName); var mappingBlogPost = elastic.Map <BlogPost>(s => s.AutoMap()); var mappingBlogAuthor = elastic.Map <Author>(s => s.AutoMap()); Console.WriteLine("createIndexResponse=" + createIndexResponse.IsValid); Console.WriteLine("mappingBlogPost=" + mappingBlogPost.IsValid); Console.WriteLine("mappingBlogAuthor=" + mappingBlogAuthor.IsValid); } IIndexResponse indexResponse = elastic.Index(blogPost, i => i .Index(indexName) .Type(typeof(BlogPost)) .Id(1) .Refresh()); Console.WriteLine("IIndexResponse=" + indexResponse.IsValid); //insert 10 documents for (var i = 2; i < 12; i++) { var blogPostNew = new BlogPost { Id = Guid.NewGuid(), Title = string.Format("title {0:000}", i), Body = string.Format("This is {0:000} very long blog post!", i) }; IIndexResponse bulkIndexResponse = elastic.Index(blogPostNew, p => p .Type(typeof(BlogPost)) .Id(i) .Refresh()); Console.WriteLine("bulk IIndexResponse=" + bulkIndexResponse.IsValid); } //Get document by id var result = elastic.Get <BlogPost>(new GetRequest(indexName, typeof(BlogPost), 16)); Console.WriteLine("Document id:" + result.Id); Console.WriteLine("Document fields:" + result.Fields); Console.WriteLine("Document Type:" + result.Type); Console.WriteLine("Document Found Status:" + result.Found); //delete document by id //var deleteResult = elastic.Delete(new DeleteRequest(indexName, typeof(BlogPost), 1)); //Console.WriteLine(deleteResult.Found); //Search query for match all var searchResult = elastic.Search <BlogPost>(sr => sr .From(0) .Size(5) .Query(q => q.MatchAll()) .Sort(ss => ss .Ascending(p => p.Title) .Field(f => f.Field(ff => ff.Title))) ); Console.WriteLine("Search results for Match All!! ==>"); Console.WriteLine(searchResult.Hits.Count()); foreach (var hit in searchResult.Hits) { Console.WriteLine(hit.Source); } //Search using Match var blogPostsForSearch = new[] { new BlogPost { Id = Guid.NewGuid(), Title = "test post 123", Body = "1" }, new BlogPost { Id = Guid.NewGuid(), Title = "test something 123", Body = "2" }, new BlogPost { Id = Guid.NewGuid(), Title = "read this post", Body = "3" } }; var id = 15; foreach (var blogPostSearch in blogPostsForSearch) { var insertRes = elastic.Index(blogPostSearch, p => p .Id(++id) .Refresh()); Console.WriteLine("Match SearchResults IIndexResponse=" + insertRes.IsValid); } var searchMatch = elastic.Search <BlogPost>(es => es .Query(q => q .Match(m => m .Field(f => f.Title) .Query("test post 123")))); Console.WriteLine("Search results for Match!! ==>"); Console.WriteLine(searchMatch.Hits.Count()); foreach (var hit in searchMatch.Hits) { Console.WriteLine(hit.Source); } //Match with AND Operator var searchMatchAnd = elastic.Search <BlogPost>(es => es .Query(q => q .Match(m => m .Field(f => f.Title) .Query("test post 123") .Operator(Operator.And)))); Console.WriteLine("Search results for Match with AND Operator!! ==>"); Console.WriteLine(searchMatchAnd.Hits.Count()); foreach (var hit in searchMatchAnd.Hits) { Console.WriteLine(hit.Source); }
//MinimumShouldMatch var searchMinMatch = elastic.Search <BlogPost>(es => es .Query(q => q .Match(m => m .Field(f => f.Title) .Query("test post 123") .Operator(Operator.Or) .MinimumShouldMatch(2)))); Console.WriteLine("Search results for Min Match!! ==>"); Console.WriteLine(searchMinMatch.Hits.Count()); foreach (var hit in searchMinMatch.Hits) { Console.WriteLine(hit.Source); } //Bool Query var boolQuerySearchResult = elastic.Search <BlogPost>(es => es .Query(qu => qu .Bool(b => b .Must(m => m.Match(mt => mt.Field(f => f.Title).Query("title")) && m.Match(mt2 => mt2.Field(f => f.Body).Query("002"))))) .Sort(so => so.Field(fe => fe.Field(fe1 => fe1.Title)) .Ascending(p => p.Title))); Console.WriteLine("Search results for Bool with Must!! ==>"); Console.WriteLine(boolQuerySearchResult.Hits.Count()); foreach (var hit in boolQuerySearchResult.Hits) { Console.WriteLine(hit.Source); } //Replacing Must with Should (OR) var boolQuerySearchResultShould = elastic.Search <BlogPost>(es => es .Query(qu => qu .Bool(b => b .Should(m => m.Match(mt => mt.Field(f => f.Title).Query("title")) || m.Match(mt2 => mt2.Field(f => f.Body).Query("002"))))) .Sort(so => so.Field(fe => fe.Field(fe1 => fe1.Title)) .Ascending(p => p.Title))); Console.WriteLine("Search results for Bool with Should!! ==>"); Console.WriteLine(boolQuerySearchResultShould.Hits.Count()); foreach (var hit in boolQuerySearchResultShould.Hits) { Console.WriteLine(hit.Source); } //Using bool with MUST NOT var boolQuerySearchResultMustNot = elastic.Search <BlogPost>(es => es .Query(qu => qu .Bool(b => b .Should(m => m.Match(mt => mt.Field(f => f.Title).Query("title")) || m.Match(mt2 => mt2.Field(f => f.Body).Query("002"))) .Must(ms => ms .Match(mt3 => mt3.Field(fi => fi.Body).Query("this"))) .MustNot(mn => mn .Match(mt4 => mt4.Field(fi => fi.Body).Query("003"))))) .Sort(so => so.Field(fe => fe.Field(fe1 => fe1.Title)) .Ascending(p => p.Title))); Console.WriteLine("Search results for Bool with MUST NOT!! ==>"); Console.WriteLine(boolQuerySearchResultMustNot.Hits.Count()); foreach (var hit in boolQuerySearchResultMustNot.Hits) { Console.WriteLine(hit.Source); } //The same query written with bitwise operators var boolQuerySearchResultBitwise = elastic.Search <BlogPost>(es => es .Query(q => (q.Match(mt1 => mt1.Field(f1 => f1.Title).Query("title")) || q.Match(mt2 => mt2.Field(f2 => f2.Body).Query("002"))) && (q.Match(mt3 => mt3.Field(fe3 => fe3.Body).Query("this"))) && (!q.Match(mt4 => mt4.Field(fe4 => fe4.Body).Query("003"))))); Console.WriteLine("Search results for Bool with Bitwise operator!! ==>"); Console.WriteLine(boolQuerySearchResultBitwise.Hits.Count()); foreach (var hit in boolQuerySearchResultBitwise.Hits) { Console.WriteLine(hit.Source); }
//Nested Types and Nested Query Console.WriteLine("*******Nested Types and Nested Query*************"); var authors = new[] { new Author { Id = Guid.NewGuid(), FirstName = "John", LastName = "Doe" }, new Author { Id = Guid.NewGuid(), FirstName = "Notjohn", LastName = "Doe" }, new Author { Id = Guid.NewGuid(), FirstName = "John", LastName = "Notdoe" } }; foreach (var author in authors) { IIndexResponse indexResponse2 = elastic.Index(author, i => i .Index(indexName) .Type(typeof(Author)) .Id(Guid.NewGuid()) .Refresh()); Console.WriteLine("IIndexResponse=" + indexResponse2.IsValid); } Console.WriteLine("IIndexResponse=" + indexResponse.IsValid); var blogPostWithAuthor = new[] { new BlogPost { Id = Guid.NewGuid(), Title = "test post 1", Body = "1", Author = authors[0] }, new BlogPost { Id = Guid.NewGuid(), Title = "test post 2", Body = "2", Author = authors[1] }, new BlogPost { Id = Guid.NewGuid(), Title = "test post 3", Body = "3", Author = authors[2] } }; foreach (var blogPostAuthor in blogPostWithAuthor) { var resindex = elastic.Index(blogPostAuthor, p => p .Id(blogPostAuthor.Id.ToString()) .Refresh()); Console.WriteLine("Match SearchResults IIndexResponse=" + resindex.IsValid); } Console.WriteLine("*******Nested Query*************"); var nestedQuery = elastic.Search <BlogPost>(es => es .Query(q => q .Nested(n => n .Path(b => b.Author) .Query(nq => nq.Match(m1 => m1.Field(f1 => f1.Author.FirstName).Query("John")) && nq.Match(m2 => m2.Field(f2 => f2.Author.LastName).Query("Notdoe")))) )); Console.WriteLine(nestedQuery.IsValid); Console.WriteLine(nestedQuery.Hits.Count()); foreach (var hit in nestedQuery.Hits) { Console.WriteLine(hit.Source); } Console.ReadLine(); }
public GetPageArticlesHandler(ElasticClient _client) { client = _client; }
static void Main(string[] args) { string indexName = "rni-test"; string esHostURL = "http://10.1.1.169:9200"; // Set to the host:port of elasticsearch var pool = new SingleNodeConnectionPool(new Uri(esHostURL)); // Add a custom JSON serializer to the connection settings in order to force a // conforming date format var connectionSettings = new ConnectionSettings(pool, settings => new RniJsonNetSerializer(settings)) .DefaultIndex(indexName) // Add the following to make sure that class property names are left as-is, // otherwise, they will be camelCased by NEST, causing confusion with the low-level mapping .DefaultFieldNameInferrer(p => p); var client = new ElasticClient(connectionSettings); Console.WriteLine("Checking for existing index"); if (client.IndexExists(indexName).Exists) { Console.WriteLine("Deleting index"); client.DeleteIndex(indexName); } Console.WriteLine("Creating Index"); var createResponse = client.CreateIndex(indexName); Console.WriteLine("Creating Record"); Record sampleRecord = new estest.Program.Record { Id = "1", FullName = "Joe Schmoe", LocalName = "Joe the Schmoe", DateOfBirth = new DateTime(1980, 11, 11) }; // Map custom type // Since the RNI plugin requires a custom type, rni_name, and the fluent NEST model doesn't appear to // have a way to map a custom type, use the LowLevel feature of NEST to apply the correct mapping. // This only needs to be done once. // NOTE: The type, e.g. Record and the second argument, Type, should match in case. Why the IndicesPutMapping // method can't figure out the type from the generic is a mystery. Console.WriteLine("Low-Level Map"); var mapResponse = client.LowLevel.IndicesPutMapping <Record>(indexName, "Record", sampleRecord.MapToRNITypes()); Console.WriteLine("Indexing"); var indexResponse = client.Index(sampleRecord); // This is here to make sure that the record was added. It's not necessary for operation. Console.WriteLine("Retrieve record as a check"); var getResponse = client.Get <Record>(1, idx => idx.Index(indexName)); // The search is the other area in which a lowlevel query is needed. Rather than use LowLevel exclusively, // I opted to use the Query.Raw feature to specify the custom function_score, name_score. //string customQuery = @"{ ""function_score"": { ""name_score"": { ""field"": ""FullName"", ""query_name"": ""Jo Shmoe""} } }"; // Again, to be safe, build the JSON using Newtonsoft JObject customQuery = new JObject( new JProperty("function_score", new JObject( new JProperty("name_score", new JObject( new JProperty("field", "FullName"), new JProperty("query_name", "Jo Schmoe") ) ) ) ) ); Console.WriteLine("Perform Search"); var searchResponse = client.Search <Record>(search => search .From(0) .Size(100) .Query(query => query .Match(m => m.Field(f => f.FullName).Query("Joe Schmoe")) ) .Rescore(fn => new RescoringDescriptor <Record>() .Rescore(rescore => rescore .WindowSize(200) .RescoreQuery(rescore_query => rescore_query .QueryWeight(0.0) .RescoreQueryWeight(1.0) .Query(query => query .Raw(customQuery.ToString()) // Raw required for custom function_score query ) ) ) ) ); Console.WriteLine(searchResponse.ToString()); System.Diagnostics.Debug.WriteLine(searchResponse.ToString()); // cleanup Console.WriteLine("Remove Index"); client.DeleteIndex(indexName); }
public object Get(int id) { ElasticClient es = conn.Update(indexName); return(esrepo.GetDocumentById(es, id, brand)); }
public ValuesController(IEsClientProvider clientProvider) { _client = clientProvider.GetClient(); _provider = clientProvider; }
private async Task <bool> SwitchToNextNodeAsync(CancellationToken cancellationToken = default) { if (_currentNode == null) { _currentNode = _nodes[0]; } else { var currentIndex = _nodes.IndexOf(_currentNode); if (currentIndex == _nodes.Count - 1) { _currentNode = _nodes[0]; } else { _currentNode = _nodes[currentIndex + 1]; } } var uri = new Uri(_currentNode.Host); var settings = new ConnectionSettings(uri); settings.EnableHttpCompression(); settings.MaximumRetries(_maximumRetries); settings.MaxRetryTimeout(_maxRetryTimeout); switch (_currentNode.AuthenticationType) { case AuthenticationType.Basic: settings.BasicAuthentication(_currentNode.UserName, _currentNode.Password); break; case AuthenticationType.ApiKey: settings.ApiKeyAuthentication(_currentNode.Id, _currentNode.ApiKey); break; default: break; } _client = new ElasticClient(settings); _logger.LogInformation($"Trying to connect to {uri} ({_eventLogItemsIndex})"); var response = await _client.PingAsync(pd => pd, cancellationToken); if (!(response.OriginalException is TaskCanceledException)) { if (!response.IsValid) { _logger.LogWarning($"Failed to connect to {uri} ({_eventLogItemsIndex}): {response.OriginalException.Message}"); } else { _logger.LogInformation($"Successfully connected to {uri} ({_eventLogItemsIndex})"); } } return(response.IsValid); }
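A sketch of how the method above could be driven: cycle through the configured nodes until one answers a ping, trying each node at most once per pass (assumes the same _nodes and _logger fields; not part of the original class):
private async Task<bool> ConnectToAnyNodeAsync(CancellationToken cancellationToken = default)
{
    for (var attempt = 0; attempt < _nodes.Count; attempt++)
    {
        // SwitchToNextNodeAsync advances to the next node, rebuilds the client and pings it
        if (await SwitchToNextNodeAsync(cancellationToken))
            return true;
    }
    _logger.LogError("None of the configured Elasticsearch nodes responded to a ping.");
    return false;
}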
static async Task Main(string[] args) { var pool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); var connectionSettings = new ConnectionSettings(pool); connectionSettings.DefaultIndex("documents"); connectionSettings.DisableDirectStreaming(); connectionSettings.PrettyJson(); var client = new ElasticClient(connectionSettings); var deleteIndexResponse = await client.Indices.DeleteAsync("documents"); var createIndexResponse = await client.Indices.CreateAsync("documents", d => d .Map(m => m.AutoMap <Document>())); var indexDocument = await client .IndexDocumentAsync(new Document { Id = 1, Properties = new List <Property> { new Property { Source = "Color", Value = "green" }, new Property { Source = "Size", Value = "2" }, } }); indexDocument = await client .IndexDocumentAsync(new Document { Id = 2, Properties = new List <Property> { new Property { Source = "Color", Value = "blue" }, new Property { Source = "Size", Value = "2" }, } }); indexDocument = await client .IndexDocumentAsync(new Document { Id = 3, Properties = new List <Property> { new Property { Source = "Color", Value = "red" }, new Property { Source = "Size", Value = "1" }, } }); var refreshAsync = client.Indices.RefreshAsync(); var query = new BoolQuery { Must = new QueryContainer[] { new NestedQuery { Path = "properties", Query = new BoolQuery() { Must = new QueryContainer[] { new TermQuery() { Field = new Nest.Field("properties.source.keyword"), Value = "Color" }, new TermsQuery() { Field = new Nest.Field("properties.value.keyword"), Terms = new[] { "green", "blue" } } } } }, new NestedQuery { Path = "properties", Query = new BoolQuery() { Must = new QueryContainer[] { new TermQuery() { Field = new Nest.Field("properties.source.keyword"), Value = "Size" }, new TermsQuery() { Field = new Nest.Field("properties.value.keyword"), Terms = new[] { "2" } } } } } } }; var response = client.Search <Document>(s => s.Query(q => query)); foreach (var document in response.Documents) { Console.WriteLine($"Id: {document.Id}"); document.Properties.ForEach(Console.WriteLine); Console.WriteLine(); } }
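For comparison, the first nested clause of the query above can also be written with NEST's fluent syntax; a sketch using the same client and field names (not part of the original program):
var fluentResponse = client.Search<Document>(s => s
    .Query(q => q
        .Nested(n => n
            .Path("properties")
            .Query(nq => nq
                .Term(t => t.Field("properties.source.keyword").Value("Color")) && nq
                .Terms(t => t.Field("properties.value.keyword").Terms("green", "blue"))))));
// fluentResponse.Documents should contain the documents whose Color property is green or blue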
private static void PrepareES(Func <ElasticClient, bool> func) { var client = new ElasticClient(setting); func.Invoke(client); }
static void Main(string[] args) { try { long elapsed_time; var stopwatch = new Stopwatch(); stopwatch.Start(); Config _conf = new Config(); _conf = JsonConvert.DeserializeObject <Config>(File.ReadAllText(@"Config.json")); var node = new Uri(_conf.EsClientAddress); string ss = "ELASTIC TO CSV TOOL"; Console.Title = "CSVRiver"; Console.SetCursorPosition((Console.WindowWidth - ss.Length) / 2, Console.CursorTop); Console.ForegroundColor = ConsoleColor.DarkRed; Console.WriteLine(ss); Console.ResetColor(); Console.ForegroundColor = ConsoleColor.Green; Console.WriteLine("ESClientAddress \t:{0}", _conf.EsClientAddress); Console.WriteLine("TenantID \t\t:{0}", _conf.TenantID); Console.WriteLine("Provided IndexTYPE\t:{0}", _conf.IndexType); Console.WriteLine("Provided QUERY JSON "); Console.ResetColor(); var settings = new ConnectionSettings(node).SetTimeout(100000000); //sanitize request _conf.Query = Regex.Replace(_conf.Query, @"\s+", string.Empty); //remove the leading "query":{ so the string can be passed as a raw query to NEST's Search _conf.Query = Regex.Replace(_conf.Query, "\"query\":{", string.Empty); string Filename = _conf.OutputFilepath; _conf.OutputFilepath = Filename; var client = new ElasticClient(settings); var searchResponse = Search(_conf, client); Console.WriteLine(searchResponse.RequestInformation); Console.ForegroundColor = ConsoleColor.Green; Console.WriteLine("Total Doc Count for query is:{0}", searchResponse.Total); Console.ResetColor(); List <Dictionary <string, string> > fields = new List <Dictionary <string, string> >(); foreach (var DOC in searchResponse.Hits.Select(p => p.Source.ToString()).ToList()) { var field = JsonConvert.DeserializeObject <Dictionary <string, string> >(DOC); fields.Add(field); } string columnValues = string.Join(",", fields[0].Select(p => p.Key)); List <string> joinedValues = new List <string>(); CSVRowValuesList(fields, joinedValues); CSVwriter(_conf, columnValues, joinedValues); stopwatch.Stop(); elapsed_time = stopwatch.ElapsedMilliseconds; Console.ForegroundColor = ConsoleColor.Cyan; Console.WriteLine("File Successfully Saved........"); Console.WriteLine("\n\nProvided FileName:\t\t{0}", _conf.OutputFilepath); Console.WriteLine("Total ElapsedTime in Seconds\t:{0}", elapsed_time / 1000); Console.ResetColor(); } catch (IOException Ioe) { Console.WriteLine("\n\nException Occurred"); Console.ForegroundColor = ConsoleColor.Cyan; Console.WriteLine("Message: {0}", Ioe.Message); Console.ResetColor(); } catch (ElasticsearchServerException Ioe) { Console.WriteLine("\n\nException Occurred"); Console.ForegroundColor = ConsoleColor.Cyan; Console.WriteLine("Message: {0}", Ioe.Message); Console.ResetColor(); } catch (Exception Ioe) { Console.WriteLine("\n\nException Occurred"); Console.ForegroundColor = ConsoleColor.Cyan; Console.WriteLine("Message: {0}", Ioe.Message); Console.ResetColor(); } finally { Console.Read(); } }
public SearchQuery(ElasticClient client, string index) { this.client = client; this.index = index; }
/// <summary> /// Initializes a new instance of the <see cref="RequestEventLogger"/> class. /// </summary> /// <param name="configuration">The configuration.</param> public RequestEventLogger(IConfiguration configuration) { _configuration = configuration; _elasticClient = new ElasticClient(new ConnectionSettings(new Uri(_configuration.GetSection(ConfigurationConstants.Serilog).GetValue <string>(ConfigurationConstants.ElasticSearchUrl)))); }
public abstract IEnumerable <SearchResultFilterData> returnSportResult(ElasticClient EsClient, QueryContainer _objNestedQuery, string IndexName);
public Dictionary <string, object> fetchDropdowns(QueryContainer _objNestedQuery, Dictionary <string, object> ObjectArray, ElasticClient EsClient, string IndexName, Dictionary <string, string> _columns, string[] sFilterArray) { return(ObjectArray); }
//hide [U] public void ArgumentExceptionBubblesOut() { var client = new ElasticClient(new ConnectionSettings()); var e = Assert.Throws <ArgumentException>(() => client.Search <Project>()); }
public ElasticWriter(ElasticClient client, ProduceOutput produceOutput, IOptions <OutputSettings> outputSettings) { _client = client; _produceOutput = produceOutput; _outputSettings = outputSettings; }
public ElasticSearchService(ElasticClient client) { _client = client; }
public DeeplinkAction(ElasticClient client) { _client = client; }
public ElasticFilterBase() { _elasticSearchProvider = ServiceLocator.GetService <IElasticSearchProvider>(); _elasticClient = _elasticSearchProvider.GetElasticClient(); _isUseElasticSearchModule = DbSetting.Instance.UseElasicSearchModule; }
public async Task UsingOnRequestCompletedForLogging() { var list = new List <string>(); var connectionPool = new SingleNodeConnectionPool(new Uri("http://localhost:9200")); var settings = new ConnectionSettings(connectionPool, new InMemoryConnection()) // <1> Here we use `InMemoryConnection`; in reality you would use another type of `IConnection` that actually makes a request. .DefaultIndex("default-index") .DisableDirectStreaming() .OnRequestCompleted(response => { // log out the request and the request body, if one exists for the type of request if (response.RequestBodyInBytes != null) { list.Add( $"{response.HttpMethod} {response.Uri} \n" + $"{Encoding.UTF8.GetString(response.RequestBodyInBytes)}"); } else { list.Add($"{response.HttpMethod} {response.Uri}"); } // log out the response and the response body, if one exists for the type of response if (response.ResponseBodyInBytes != null) { list.Add($"Status: {response.HttpStatusCode}\n" + $"{Encoding.UTF8.GetString(response.ResponseBodyInBytes)}\n" + $"{new string('-', 30)}\n"); } else { list.Add($"Status: {response.HttpStatusCode}\n" + $"{new string('-', 30)}\n"); } }); var client = new ElasticClient(settings); var syncResponse = client.Search <object>(s => s .AllTypes() .AllIndices() .Scroll("2m") .Sort(ss => ss .Ascending(SortSpecialField.DocumentIndexOrder) ) ); list.Count.Should().Be(2); var asyncResponse = await client.SearchAsync <object>(s => s .AllTypes() .AllIndices() .Scroll("2m") .Sort(ss => ss .Ascending(SortSpecialField.DocumentIndexOrder) ) ); list.Count.Should().Be(4); list.ShouldAllBeEquivalentTo(new[] { "POST http://localhost:9200/_search?scroll=2m \n{\"sort\":[{\"_doc\":{\"order\":\"asc\"}}]}", "Status: 200\n------------------------------\n", "POST http://localhost:9200/_search?scroll=2m \n{\"sort\":[{\"_doc\":{\"order\":\"asc\"}}]}", "Status: 200\n------------------------------\n" }); }
/// <summary> /// Simulate loading cnblogs (博客园) article titles as seed data /// </summary> /// <param name="cnblogFilename"></param> /// <param name="strMDContent"></param> /// <param name="strHTMLContent"></param> /// <param name="LimitCnt"></param> /// <param name="client"></param> public static void InsertCnblogs(string cnblogFilename, string strMDContent, string strHTMLContent, int LimitCnt, ElasticClient client, bool IsArticleRandom) { Random r = new Random(); titles.Clear(); userdic.Clear(); userColdic.Clear(); StreamReader FileReader = new StreamReader(cnblogFilename); string Line = string.Empty; int startlength = "the article title is :".Length; int endlength = " - 博客园".Length; MongoDbRepository.DrapCollection(Article.CollectionName); MongoDbRepository.DrapCollection(Collection.CollectionName); MongoDbRepository.DrapCollection(GithubAccount.CollectionName); MongoDbRepository.DrapCollection(QQAccount.CollectionName); MongoDbRepository.DrapCollection(UserInfo.CollectionName); MongoDbRepository.DrapCollection(ArticleContent.CollectionName); MongoDbRepository.DrapCollection(SiteConfig.CollectionName); var PublishStatusTypeValues = Enum.GetValues(typeof(ApproveStatus)); var ArticleLevelValues = Enum.GetValues(typeof(ArticleLevel)); int GetCnt = 0; int LineCnt = 0; while (!FileReader.EndOfStream) { Line = FileReader.ReadLine(); LineCnt++; if (Line.StartsWith("the article title is :用户登录 - 博客园")) { continue; } if (!Line.StartsWith("the article title is :")) { System.Diagnostics.Debug.WriteLine(Line); continue; } if (!Line.EndsWith(" - 博客园")) { System.Diagnostics.Debug.WriteLine(Line); continue; } try { Line = Line.Substring(startlength, Line.Length - startlength - endlength); int pos = Line.LastIndexOf(" - "); string title = Line.Substring(0, pos); string user = Line.Substring(pos + 3); if (!userdic.ContainsKey(user)) { var userinfo = new UserInfo(); int qqOrGit = r.Next(100); string accountId = string.Empty; QQAccount qqaccount = new QQAccount(); GithubAccount gitaccount = new GithubAccount(); if (qqOrGit % 2 == 0) { //Github account gitaccount = new GithubAccount() { Avatar_url = "https://avatars.githubusercontent.com/u/897796?v=3", Login = user, Name = user, Email = "*****@*****.**", Location = "Shanghai,China", Blog = "http://www.mywechatapp.com", Company = "Shanghai Chuwa software co.ltd", Followers = 50, Following = 2, }; accountId = MongoDbRepository.InsertRec(gitaccount); userinfo = new UserInfo() { RegisterAccountID = accountId, Privilege = UserType.Normal, RegisterMethod = GithubAccount.Github, TopicList = new List <string>(), TagList = new List <string>(), NickName = gitaccount.Name, Avatar_url = gitaccount.Avatar_url, ContainTag = string.Empty, AntiTag = string.Empty, Catalog = new List <string>(), Level = new List <ArticleLevel>() }; } else { //QQ account qqaccount = new QQAccount() { figureurl = "https://avatars.githubusercontent.com/u/19196306?v=3", gender = "男", nickname = user, OpenID = "1234567890" }; accountId = MongoDbRepository.InsertRec(qqaccount); userinfo = new UserInfo() { RegisterAccountID = accountId, Privilege = UserType.Normal, RegisterMethod = QQAccount.QQ, TopicList = new List <string>(), TagList = new List <string>(), NickName = qqaccount.nickname, Avatar_url = qqaccount.figureurl, ContainTag = string.Empty, AntiTag = string.Empty, Catalog = new List <string>(), Level = new List <ArticleLevel>() }; } var x = r.Next(100); if (x % 10 == 0) { userinfo.Privilege = UserType.Author; } else { if (x == 51) { userinfo.Privilege = UserType.Editor; } } var userId = MongoDbRepository.InsertRec(userinfo); if (userId == 1.ToString(EntityBase.SnFormat)) { userinfo.Privilege = UserType.Admin; UserInfo.UpdateUserInfo(userinfo); } if (userId == 2.ToString(EntityBase.SnFormat)) { userinfo.Privilege = UserType.Editor; UserInfo.UpdateUserInfo(userinfo); } if (qqOrGit % 2 == 0) { MongoDbRepository.UpdateRec(gitaccount, nameof(GithubAccount.UserInfoID), (BsonString)userId); } else { MongoDbRepository.UpdateRec(qqaccount, nameof(QQAccount.UserInfoID), (BsonString)userId); } userdic.Add(user, userId); //Default collection for the user Collection collection = new Collection() { Title = user + " 的文集", Description = user + " 的文集", IsSerie = (r.Next(100) % 2 == 1) }; var CollectionId = Collection.InsertCollection(collection, userId); userColdic.Add(user, CollectionId); }
if (!titles.Contains(title)) { string ownerid = userdic[user]; string collecId = userColdic[user]; Article article = new Article() { Title = title, IsFirstPage = IsArticleRandom ? (r.Next(0, 100) % 2 == 1) : true, IsPrivate = IsArticleRandom ? (r.Next(0, 100) % 2 == 1) : false, PublishStatus = IsArticleRandom ? (ApproveStatus)(PublishStatusTypeValues.GetValue(r.Next(0, 100) % PublishStatusTypeValues.Length)) : ApproveStatus.Accept, IsCloseComment = (r.Next(0, 100) % 2 == 1), IsOriginal = (r.Next(0, 100) % 2 == 1), CollectionID = collecId, PublishDateTime = DateTime.Now.AddMinutes(r.NextDouble() * -10000), Level = (ArticleLevel)(ArticleLevelValues.GetValue(r.Next(0, 100) % ArticleLevelValues.Length)), Catalog = Article.CatalogItem[r.Next(0, 100) % Article.CatalogItem.Length].Substring(0, 4) }; if (article.IsPrivate) { article.IsFirstPage = false; article.PublishStatus = ApproveStatus.NotNeed; } else { article.IsTopicable = (r.Next(0, 100) % 2 == 1); article.IsNeedTopicApproval = (r.Next(0, 100) % 2 == 1); } if (article.IsFirstPage) { article.IsOriginal = true; article.IsCloseComment = false; } article.ConfirmDateTime = article.PublishDateTime.AddSeconds(r.Next(1000, 7200)); string ArticleId = Article.InsertArticle(article, ownerid); if (client != null && (!article.IsPrivate) && (article.PublishStatus == ApproveStatus.Accept)) { client.Index(article); } //Insert the MarkDown document (the front page is read-only, so only SaveMarkDownVersion:Current is strictly needed) ArticleContent.SaveMarkDownVersion(ArticleId, strMDContent, ownerid, RevisionType.Current); ArticleContent.SaveMarkDownVersion(ArticleId, strMDContent, ownerid, RevisionType.First); ArticleContent.SaveMarkDownVersion(ArticleId, strMDContent, ownerid, RevisionType.Draft); ArticleContent.SaveHTMLVersion(ArticleId, strHTMLContent, ownerid); titles.Add(title); GetCnt++; if (GetCnt == LimitCnt) { break; } } else { System.Diagnostics.Debug.WriteLine("Duplicate:" + Line); } } catch (Exception) { System.Diagnostics.Debug.WriteLine("Error:" + Line); } } FileReader.Close(); System.Diagnostics.Debug.WriteLine("LineCnt:" + LineCnt); }
public static void CreateIndex(ElasticClient client, IndexType idxTyp) { IndexSettings set = new IndexSettings(); set.NumberOfReplicas = 2; if (idxTyp == IndexType.DataSource) { set.NumberOfShards = 4; } else { set.NumberOfShards = 8; } // Create a Custom Analyzer ... var an = new CustomAnalyzer(); an.Tokenizer = "standard"; // ... with Filters from the StandardAnalyzer var filter = new List <string>(); filter.Add("lowercase"); filter.Add("czech_stop"); //an.Filter.Add("czech_keywords"); filter.Add("czech_stemmer"); //use Hunspell filter.Add("asciifolding"); an.Filter = filter; // Add the Analyzer with a name set.Analysis = new Nest.Analysis() { Analyzers = new Analyzers(), TokenFilters = new TokenFilters(), }; set.Analysis.Analyzers.Add("default", an); set.Analysis.TokenFilters.Add("czech_stop", new StopTokenFilter() { StopWords = new string[] { "_czech_" } }); set.Analysis.TokenFilters.Add("czech_stemmer", new StemmerTokenFilter() { Language = "czech" }); //Hunspell IndexState idxSt = new IndexState(); idxSt.Settings = set; CreateIndexResponse res = null; switch (idxTyp) { case IndexType.VerejneZakazky: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i .InitializeUsing(idxSt) .Map <Lib.Data.VZ.VerejnaZakazka>(map => map.AutoMap().DateDetection(false)) ); break; case IndexType.ProfilZadavatele: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i .InitializeUsing(idxSt) .Map <Lib.Data.VZ.ProfilZadavatele>(map => map.AutoMap().DateDetection(false)) ); break; case IndexType.Insolvence: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i //todo: es7 check .InitializeUsing(idxSt) .Map <Lib.Data.Insolvence.Rizeni>(map => map.AutoMap().DateDetection(false)) ); break; case IndexType.Dotace: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i //todo: es7 check .InitializeUsing(idxSt) .Map <Data.Dotace.Dotace>(map => map.AutoMap().DateDetection(false)) ); break; case IndexType.Smlouvy: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i //todo: es7 check .InitializeUsing(idxSt) .Map <Lib.Data.Smlouva>(map => map.AutoMap().DateDetection(false)) ); break; case IndexType.Firmy: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i //todo: es7 check .InitializeUsing(idxSt) .Map <Data.Firma.Search.FirmaInElastic>(map => map.AutoMap(maxRecursion: 1)) ); break; case IndexType.Logs: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i //todo: es7 check .InitializeUsing(idxSt) .Map <Lib.Data.Logs.ProfilZadavateleDownload>(map => map.AutoMap(maxRecursion: 1)) ); break; case IndexType.VerejneZakazkyNaProfiluRaw: res = client.Indices .Create(client.ConnectionSettings.DefaultIndex, i => i //todo: es7 check .InitializeUsing(idxSt) .Map <Lib.Data.External.ProfilZadavatelu.ZakazkaRaw>(map => map .Properties(p => p .Keyword(k => k.Name(n => n.ZakazkaId)) .Keyword(k => k.Name(n => n.Profil)) .Date(k => k.Name(n => n.LastUpdate)) ) ) ); break; } }
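A quick way to sanity-check the custom "default" analyzer registered above is the analyze API; a sketch assuming NEST 7.x and an already created index (not part of the original helper):
public static void CheckDefaultAnalyzer(ElasticClient client, string sampleText)
{
    var analyzeResponse = client.Indices.Analyze(a => a
        .Index(client.ConnectionSettings.DefaultIndex)
        .Analyzer("default")
        .Text(sampleText));
    // print the tokens produced by the lowercase/czech_stop/czech_stemmer/asciifolding chain
    foreach (var token in analyzeResponse.Tokens)
        Console.WriteLine(token.Token);
}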
/// <summary> /// Create index for Call report documents /// </summary> /// <param name="documentsIndex"></param> public static void CreateIndex(IndexName documentsIndex = null) { var test = "docstest"; var index = test; var node = new Uri("http://rcovlnx3532.corp.frk.com:9200/"); var connectionSettings = new ConnectionSettings(node) .DefaultMappingFor <Document>(m => m.IndexName(index) ); var client = new ElasticClient(connectionSettings); try { var indexResponse = client.CreateIndex(index, c => c .Settings(s => s.Setting("index.soft_deletes.enabled", true) .Analysis(a => a .Analyzers(ad => ad .Custom("windows_path_hierarchy_analyzer", ca => ca .Tokenizer("windows_path_hierarchy_tokenizer") ) .Pattern("my_pattern_analyzer", p => p .Pattern("\\W|_") .Lowercase() ) .Custom("auto_complete", au => au.Tokenizer("standard").Filters("lowercase", "asciifolding", "auto-complete-filter")) .Custom("auto-complete-id", au => au.Tokenizer("standard").Filters("lowercase", "asciifolding", "auto-complete-id-filter")) ) .TokenFilters(tokenFilter => tokenFilter .EdgeNGram("auto-complete-filter", (t) => t.MinGram(3).MaxGram(20)) .EdgeNGram("auto-complete-id-filter", t => t.MinGram(1).MaxGram(5))) .Tokenizers(t => t .PathHierarchy("windows_path_hierarchy_tokenizer", ph => ph .Delimiter('\\') ) .NGram("nGramTokenizer", tokenizer => tokenizer.MinGram(3).MaxGram(20).TokenChars(TokenChar.Letter, TokenChar.Digit, TokenChar.Punctuation, TokenChar.Symbol)) ) ) ) .Mappings(m => m .Map <Document>(mp => mp .AutoMap() .AllField(all => all .Enabled(false) ) .Properties(ps => ps .Text(s => s .Name(n => n.Path) .Analyzer("windows_path_hierarchy_analyzer") ) .Text(s => s.Name(n => n.Title).Analyzer("my_pattern_analyzer")) //need to add this to break Title search with "_" .Text(s => s.Name(n => n.CompanyName).Analyzer("auto_complete").Fields(ff => ff.Keyword(k => k.Name("keyword")))) .Text(s => s.Name(n => n.Author).Analyzer("auto_complete").Fields(ff => ff.Keyword(k => k.Name("keyword")))) .Text(s => s.Name(n => n.Country).Analyzer("auto_complete").Fields(ff => ff.Keyword(k => k.Name("keyword")))) .Text(s => s.Name(n => n.Location).Analyzer("auto_complete").Fields(ff => ff.Keyword(k => k.Name("keyword")))) .Text(s => s.Name(n => n.RegionName).Analyzer("auto_complete").Fields(ff => ff.Keyword(k => k.Name("keyword")))) .Text(s => s.Name(n => n.SectorName).Analyzer("auto_complete").Fields(ff => ff.Keyword(k => k.Name("keyword")))) //.Text(s => s.Name(n => n.Attachment.Content).Analyzer("auto_complete").Fields(ff => ff.Keyword(k => k.Name("keyword")))) .Object <ElasticEntities.Attachment>(a => a .Name(n => n.Attachment) .AutoMap() ) ) ) ) ); client.PutPipeline("attachments", p => p .Description("Document attachment pipeline") .Processors(pr => pr .Attachment <Document>(a => a .Field(f => f.Content) .TargetField(f => f.Attachment) ) .Remove <Document>(r => r .Field(f => f.Content) ) ) ); PopulateIndex(client); } catch (Exception ex) { } }
public AuthenticationController(IMemoryCache memoryCache, IDataStore dataStorage, Fido2 lib, IOptions <IndexingOptions> indexOptions, ElasticClient elasticClient) { _memoryCache = memoryCache; _dataStore = dataStorage; _lib = lib; _indexOptions = indexOptions.Value; _elasticClient = elasticClient; }
public IReadOnlyCollection <Accounts> search_res(ElasticClient client, string field, string value) { ISearchResponse <Accounts> response = client.Search <Accounts>((s => s .Index("people") .From(0) .Size(10) .Query(q => q .Match(z => z .Field("firstname") .Query("Amber") ) ) )); //Fetch the document from ES, modify the property we want, then push everything back to ES var getresponse = client.Get <Person>("1"); var surprise = getresponse.Source; surprise.Firstname = "check"; var updateResponse = client.Update <Person>(1, u => u .Index("letest") .Doc(surprise)); // Another way to modify content in ES var updateResponse1 = client.Update <Person>(2, u => u .Index("letest") .Doc(new Person { //Id = "2", Firstname = "teeeest" }) .DetectNoop(false) ); if (updateResponse1.IsValid == true) { Console.WriteLine("is valid"); } /* * var task = client.UpdateAsync<ElasticsearchDocument>( * new DocumentPath<ElasticsearchDocument>(doc), u => * u.Index(indexName).Doc(doc));*/ // await client.UpdateAsync<ElasticSearchDoc>(doc.Id, u => u.Index("movies").Doc(new ElasticSearchDoc { Title = "Updated title!" })); //var test = new UpdateRequest("people", "_doc", "1"); var res = response.Documents; Console.WriteLine("count " + res.Count); /* foreach (var item in res) * { * Console.WriteLine("on test " + item.firstname); * item.firstname = "Nikita"; * Console.WriteLine(item.firstname); * } */ var test2 = client.Search <Accounts>(l => l .Index("people") .Query(z => z .MatchAll())); var quer = client.Search <Accounts>((l => l .Index("people") .Size(10000) .Query(p => p .Match(m => m .Field(field) .Query(value))))); IReadOnlyCollection <Accounts> querD = quer.Documents; /* * foreach (var item in querD) * { * Console.WriteLine(item.firstname + " " + item.lastname + " " + item.age + " " + item.gender); * }*/ Console.WriteLine("there is a Count of " + querD.Count); Console.WriteLine(); return(querD); }
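A usage sketch for the method above, called from its containing class (the field and value are illustrative; the "people" index is assumed to be populated with Accounts documents, as in the method itself):
var client = new ElasticClient(new ConnectionSettings(new Uri("http://localhost:9200")));
var accounts = search_res(client, "lastname", "Duke");
foreach (var account in accounts)
{
    // firstname/lastname mirror the Accounts fields referenced in the method above
    Console.WriteLine(account.firstname + " " + account.lastname);
}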
public void Delete(int id) { ElasticClient es = conn.Update(indexName); esrepo.DeleteDocument(es, id, brand, indexName); }
static void Main(string[] args) { elasticSettings = new ConnectionSettings(new Uri("http://127.0.0.1:9200")) .SetDefaultIndex("people"); client = new ElasticClient(elasticSettings); client.DeleteIndex("people"); // Create an index client.CreateIndex("people", c => c .NumberOfReplicas(0) .AddMapping <Person>(m => m.MapFromAttributes()) .NumberOfShards(1)); //client.MapFluent<Person>(m => m.IndexAnalyzer("standard") // .Properties(props => props // .String(s => s // .Name(p => p.Message) // .Index(FieldIndexOption.analyzed)) // .Number(n => n // .Name(p => p.Age)) // .String(s => s // .Name(p => p.FirstName)) // .String(s => s // .Name(p => p.Sex)) // .String(s => s // .Name(p => p.LastName)))); // Add some people var jp = new Person { FirstName = "JP", LastName = "Toto", Age = 37, Message = "OMG yay ES!", Sex = "Male" }; var matt = new Person { FirstName = "Matt", LastName = "Toto", Age = 37, Message = "I'm JPs brother", Sex = "Male" }; var christine = new Person { FirstName = "Christine", LastName = "Toto", Age = 0, Message = "I'm JPs wife", Sex = "Female" }; var kevin = new Person { FirstName = "Kevin", LastName = "Smith", Age = 26, Message = "I'm JPs other brother", Sex = "Male" }; client.Index(jp); client.Index(matt); client.Index(christine); client.Index(kevin); client.Flush(true); var results = client.Search <Person>(s => s .MatchAll() .FacetStatistical(fs => fs .OnField(f => f.Age) )); var facet = results.Facet <StatisticalFacet>(f => f.Age); Console.WriteLine("Statistical Facets"); Console.WriteLine(""); Console.WriteLine("Max: {0}", facet.Max); Console.WriteLine("Min: {0}", facet.Min); Console.WriteLine("Std Dev: {0}", facet.StandardDeviation); Console.WriteLine("Total: {0}", facet.Total); Console.ReadKey(); Console.Clear(); Console.WriteLine("Histogram Facets"); Console.WriteLine(""); var facetResults = client.Search <Person>(s => s .MatchAll() .FacetHistogram(fs => fs .OnField(f => f.Age) .Interval(1) )); var facet2 = facetResults.Facet <HistogramFacet>(f => f.Age); foreach (var item in facet2.Items) { Console.WriteLine("Key: {0} Count: {1}", item.Key, item.Count); } Console.ReadKey(); Console.Clear(); Console.WriteLine("Term Facets"); Console.WriteLine(""); var facetResults2 = client.Search <Person>(s => s .From(0) .Size(10) .MatchAll() .FacetTerm(t => t.OnField(f => f.LastName).Size(20)) ); var facet3 = facetResults2.Facet <TermFacet>(f => f.LastName); foreach (var item in facet3.Items) { Console.WriteLine("Key: {0} Count: {1}", item.Term, item.Count); } Console.ReadKey(); }
/// <summary> /// Search log entries /// </summary> /// <param name="highlight"></param> /// <param name="start"></param> /// <param name="end"></param> /// <param name="keyword"></param> /// <param name="logger_name"></param> /// <param name="page"></param> /// <param name="pagesize"></param> /// <returns></returns> public static async Task <PagerData <ESLogLine, QueryExtData> > Search( bool highlight = true, DateTime?start = null, DateTime?end = null, string keyword = null, string logger_name = null, int page = 1, int pagesize = 10) { var sd = new SearchDescriptor <ESLogLine>(); sd = sd.Index(IndexName); var query = new QueryContainer(); if (start != null) { query &= new DateRangeQuery() { Field = nameof(temp.UpdateTime), GreaterThanOrEqualTo = start.Value }; } if (end != null) { query &= new DateRangeQuery() { Field = nameof(temp.UpdateTime), LessThan = end.Value }; } if (ValidateHelper.IsPlumpString(keyword)) { query &= new MatchQuery() { Field = nameof(temp.Message), Query = keyword, Operator = Operator.Or, MinimumShouldMatch = "100%" }; } if (ValidateHelper.IsPlumpString(logger_name)) { query &= new TermQuery() { Field = nameof(temp.LoggerName), Value = logger_name }; } //query conditions sd = sd.Query(_ => query); //aggregations sd = sd.Aggregations(x => x.Terms(nameof(temp.LoggerName), av => av.Field(nameof(temp.LoggerName)).Size(1000)) .Terms(nameof(temp.Level), av => av.Field(nameof(temp.Level)).Size(1000)) .Terms(nameof(temp.Domain), av => av.Field(nameof(temp.Domain)).Size(1000))); //highlighting if (highlight) { sd = sd.AddHighlightWrapper("<em class='kwd'>", "</em>", x => x.Field(nameof(temp.Message))); } //sorting var sort = new SortDescriptor <ESLogLine>(); sort = sort.Descending(x => x.UpdateTime); sort = sort.Descending(Field.Create("_score", boost: null)); sd = sd.Sort(_ => sort); //paging sd = sd.QueryPage_(page, pagesize); //call the server var client = new ElasticClient(ElasticsearchClientManager.Instance.DefaultClient); var re = await client.SearchAsync <ESLogLine>(_ => sd); re.ThrowIfException(); var data = new PagerData <ESLogLine, QueryExtData>(); data.ItemCount = (int)re.Total; data.DataList = re.Hits.Select(x => x.Source).ToList(); //aggregation data data.ExtData = new QueryExtData(); data.ExtData.Ass = re.GetAggs(); data.ExtData.Highlight = re.GetHighlights(); return(data); }
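A usage sketch for the search method above: page through recent log lines that mention a keyword for one logger (all parameter values are illustrative):
var result = await Search(
    highlight: true,
    start: DateTime.Now.AddDays(-1),
    end: DateTime.Now,
    keyword: "timeout",
    logger_name: "OrderService",
    page: 1,
    pagesize: 20);
Console.WriteLine("total hits: " + result.ItemCount);
foreach (var line in result.DataList)
{
    // UpdateTime and Message mirror the field names used in the query above
    Console.WriteLine(line.UpdateTime + " " + line.Message);
}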
public void BeforeAllElasticsearchTests() { ElasticsearchClient = SetupElasticsearchConnection(); }
public static ISearchResponse <ImDbBasicTitle> SearchMovies(ElasticClient client, string index, SearchRequest <ImDbBasicTitle> request) { var response = client.Search <ImDbBasicTitle>(request); return(response); }
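A usage sketch for SearchMovies: build the request with NEST's object initializer syntax and pass it in (the "primaryTitle" field name is an assumption about the ImDbBasicTitle mapping):
var request = new SearchRequest<ImDbBasicTitle>(index)
{
    From = 0,
    Size = 25,
    Query = new MatchQuery { Field = "primaryTitle", Query = "inception" }
};
var response = SearchMovies(client, index, request);
Console.WriteLine(response.Total + " matching titles");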
public ELSMemberRepository(ILogger <ILogger> logger, IOptions <ELSOptions> eLSOptions) { this.logger = logger; this.client = new ElasticClient(new ConnectionSettings(new Uri(eLSOptions.Value.Uri)) .DefaultIndex(eLSOptions.Value.DefaultIndex)); }
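A registration sketch for the repository above in ASP.NET Core (the "ELS" section name is an assumption; ELSOptions is expected to expose the Uri and DefaultIndex used by the constructor):
public void ConfigureServices(IServiceCollection services)
{
    // bind the assumed "ELS" configuration section to ELSOptions for IOptions<ELSOptions>
    services.Configure<ELSOptions>(Configuration.GetSection("ELS"));
    services.AddSingleton<ELSMemberRepository>();
}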