/// <summary>
/// Regression test: creating an index from a fully populated IndexSettings
/// (similarity, raw key/value settings, analyzers and a root-object mapping)
/// must not throw a NullReferenceException and must yield a valid response.
/// </summary>
public void CreateIndexShouldNotThrowNullReference()
{
    var indexSettings = new IndexSettings
    {
        Similarity = new SimilaritySettings(),
        NumberOfReplicas = 1,
        NumberOfShards = 5
    };

    // Raw settings exercised alongside the typed properties.
    indexSettings.Add("index.refresh_interval", "10s");
    indexSettings.Add("merge.policy.merge_factor", "10");
    indexSettings.Add("search.slowlog.threshold.fetch.warn", "1s");

    indexSettings.Analysis.Analyzers.Add(new KeyValuePair<string, AnalyzerBase>("keyword", new KeywordAnalyzer()));
    indexSettings.Analysis.Analyzers.Add(new KeyValuePair<string, AnalyzerBase>("simple", new SimpleAnalyzer()));

    var rootMapping = new RootObjectMapping
    {
        Name = "my_root_object",
        Properties = new Dictionary<string, IElasticType>
        {
            // NOTE(review): the trailing space in "my_string_field " is preserved from the original.
            { "my_field", new StringMapping() { Name = "my_string_field " } }
        }
    };
    indexSettings.Mappings.Add(rootMapping);

    Assert.DoesNotThrow(() =>
    {
        var response = this._client.CreateIndex(ElasticsearchConfiguration.NewUniqueIndexName(), indexSettings);
        Assert.IsTrue(response.IsValid, response.ConnectionStatus.ToString());
    });
}
// Creates a throw-away index, flips two settings in-place via UpdateSettings,
// and verifies the values reported by GetIndexSettings afterwards.
public void UpdateSettingsSimple() {
    var index = Guid.NewGuid().ToString();
    var client = this.ConnectedClient;
    var settings = new IndexSettings();
    settings.NumberOfReplicas = 1;
    settings.NumberOfShards = 5;
    settings.Add("refresh_interval", "1s");
    settings.Add("search.slowlog.threshold.fetch.warn", "1s");
    client.CreateIndex(index, settings);
    // Mutate the same settings object and push the update to the live index.
    settings["refresh_interval"] = "-1";
    settings["search.slowlog.threshold.fetch.warn"] = "5s";
    var r = this.ConnectedClient.UpdateSettings(index, settings);
    Assert.True(r.IsValid);
    Assert.True(r.OK);
    var getResponse = this.ConnectedClient.GetIndexSettings(index);
    Assert.AreEqual(getResponse.Settings["refresh_interval"], "-1");
    // NOTE(review): the slowlog threshold was just updated to "5s" above, yet
    // this asserts the original "1s" — either the update is expected to be
    // ignored for this key or the expected value is stale. Confirm intent.
    Assert.AreEqual(getResponse.Settings["search.slowlog.threshold.fetch.warn"], "1s");
    this.ConnectedClient.DeleteIndex(index);
}
/// <summary>
/// Regression test: initializing a fluent CreateIndex call from a fully
/// populated IndexSettings object must not throw a NullReferenceException
/// and must yield a valid response.
/// </summary>
public void CreateIndexShouldNotThrowNullReference()
{
    var indexSettings = new IndexSettings
    {
        Similarity = new SimilaritySettings(),
        NumberOfReplicas = 1,
        NumberOfShards = 5
    };

    // Raw settings exercised alongside the typed properties.
    indexSettings.Add("index.refresh_interval", "10s");
    indexSettings.Add("merge.policy.merge_factor", "10");
    indexSettings.Add("search.slowlog.threshold.fetch.warn", "1s");

    indexSettings.Analysis.Analyzers.Add(new KeyValuePair<string, AnalyzerBase>("keyword", new KeywordAnalyzer()));
    indexSettings.Analysis.Analyzers.Add(new KeyValuePair<string, AnalyzerBase>("simple", new SimpleAnalyzer()));

    var rootMapping = new RootObjectMapping
    {
        Name = "my_root_object",
        Properties = new Dictionary<PropertyNameMarker, IElasticType>
        {
            // NOTE(review): the trailing space in "my_string_field " is preserved from the original.
            { "my_field", new StringMapping() { Name = "my_string_field " } }
        }
    };
    indexSettings.Mappings.Add(rootMapping);

    Assert.DoesNotThrow(() =>
    {
        var response = this._client.CreateIndex(
            ElasticsearchConfiguration.NewUniqueIndexName(),
            i => i.InitializeUsing(indexSettings));
        Assert.IsTrue(response.IsValid, response.ConnectionStatus.ToString());
    });
}
/// <summary>
/// Creates an index combining typed settings, raw settings, an analyzer and a
/// mapping copied from an existing type, then round-trips them through
/// GetIndexSettings and cleans up the index.
/// </summary>
public void GetIndexSettingsComplex()
{
    var indexName = Guid.NewGuid().ToString();

    var indexSettings = new IndexSettings
    {
        NumberOfReplicas = 4,
        NumberOfShards = 8
    };
    indexSettings.Analysis.Analyzer.Add("snowball", new SnowballAnalyzerSettings { Language = "English" });

    // Reuse the mapping of an existing type, renamed for the new index.
    var typeMapping = this.ConnectedClient.GetMapping(Test.Default.DefaultIndex, "elasticsearchprojects");
    typeMapping.Name = indexName;
    indexSettings.Mappings.Add(typeMapping);
    indexSettings.Add("merge.policy.merge_factor", "10");

    var createResponse = this.ConnectedClient.CreateIndex(indexName, indexSettings);
    var settingsResponse = this.ConnectedClient.GetIndexSettings(indexName);

    Assert.True(settingsResponse.IsValid);
    Assert.NotNull(settingsResponse.Settings);
    Assert.AreEqual(settingsResponse.Settings.NumberOfReplicas, 4);
    Assert.AreEqual(settingsResponse.Settings.NumberOfShards, 8);
    Assert.Greater(settingsResponse.Settings.Count(), 0);
    Assert.True(settingsResponse.Settings.ContainsKey("merge.policy.merge_factor"));

    this.ConnectedClient.DeleteIndex(indexName);
}
/// <summary>
/// Drops and recreates the index named INDEX_PREFIX + suffix with default
/// settings, then maps the Message type from its attributes. Aborts (after
/// waiting for a key press) when the cluster cannot be reached.
/// </summary>
private static void RecreateIndex(string suffix)
{
    var indexName = INDEX_PREFIX + suffix;

    var connectionSettings = new ConnectionSettings("localhost", 9200)
        .SetDefaultIndex(indexName);
    var client = new ElasticClient(connectionSettings);

    ConnectionStatus status;
    if (!client.TryConnect(out status))
    {
        Console.Error.WriteLine("Could not connect to {0}:\r\n{1}", connectionSettings.Host, status.Error.OriginalException.Message);
        Console.Read();
        return;
    }

    // Remove any previous incarnation before creating a fresh one.
    client.DeleteIndex(indexName);

    var indexSettings = new IndexSettings
    {
        NumberOfReplicas = 1,
        NumberOfShards = 5
    };
    indexSettings.Add("index.refresh_interval", "10s");

    var createResponse = client.CreateIndex(indexName, indexSettings);
    client.MapFromAttributes<Message>();
}
/// <summary>
/// Ensures the given index exists, creating it (with store compression and a
/// default query field) and its mappings when it is missing.
/// </summary>
/// <param name="indexName">Name of the index to ensure.</param>
/// <returns>true when the index already existed or was created successfully; false otherwise.</returns>
private bool CreateIndex(string indexName)
{
    if (client.IndexExists(indexName).Exists)
    {
        return true;
    }

    var indexSettings = new IndexSettings();
    indexSettings.Add("index.store.compress.stored", true);
    indexSettings.Add("index.store.compress.tv", true);
    indexSettings.Add("index.query.default_field", "@message");

    IIndicesOperationResponse result = client.CreateIndex(indexName, indexSettings);
    if (!result.OK)
    {
        // Fix: previously CreateMappings ran even when index creation failed,
        // attempting to put mappings on a non-existent index.
        logger.Error(string.Format("Failed to create index: '{0}'. Result: '{1}' Retrying...", indexName, result.ConnectionStatus.Result));
        return false;
    }

    CreateMappings(indexName);
    return true;
}
/// <summary>
/// Creating an index with "index.blocks.read_only" = "true" (fluent overloads)
/// should succeed, and GetIndexSettings should report the setting back.
/// </summary>
public void ShouldBeAbleToSetIndexToReadonly()
{
    var indexSettings = new IndexSettings
    {
        Similarity = new SimilaritySettings(),
        NumberOfReplicas = 1,
        NumberOfShards = 5
    };
    indexSettings.Add("index.blocks.read_only", "true");

    var indexName = ElasticsearchConfiguration.NewUniqueIndexName();
    var createResponse = this._client.CreateIndex(indexName, i => i.InitializeUsing(indexSettings));
    Assert.IsTrue(createResponse.IsValid, createResponse.ConnectionStatus.ToString());

    var settingsResponse = this._client.GetIndexSettings(i => i.Index(indexName));
    Assert.IsTrue(settingsResponse.IsValid, settingsResponse.ConnectionStatus.ToString());

    settingsResponse.Settings.Should().ContainKey("index.blocks.read_only");
    settingsResponse.Settings["index.blocks.read_only"].Should().Be("true");
}
/// <summary>
/// Creating an index with "index.blocks.read_only" = "true" (settings-object
/// overloads) should succeed, and GetIndexSettings should report the setting back.
/// </summary>
public void ShouldBeAbleToSetIndexToReadonly()
{
    var indexSettings = new IndexSettings
    {
        Similarity = new SimilaritySettings(),
        NumberOfReplicas = 1,
        NumberOfShards = 5
    };
    indexSettings.Add("index.blocks.read_only", "true");

    var indexName = ElasticsearchConfiguration.NewUniqueIndexName();
    var createResponse = this._client.CreateIndex(indexName, indexSettings);
    Assert.IsTrue(createResponse.IsValid, createResponse.ConnectionStatus.ToString());

    var settingsResponse = this._client.GetIndexSettings(indexName);
    Assert.IsTrue(settingsResponse.IsValid, settingsResponse.ConnectionStatus.ToString());

    settingsResponse.Settings.Should().ContainKey("index.blocks.read_only");
    settingsResponse.Settings["index.blocks.read_only"].Should().Be("true");
}
/// <summary>
/// Drops and recreates the index named INDEX_PREFIX + suffix with refresh
/// disabled (bulk-indexing friendly), then maps the Message type from its
/// attributes. Routes traffic through Fiddler when it is running.
/// </summary>
private static void RecreateIndex(string suffix)
{
    // "ipv4.fiddler" lets a running Fiddler instance intercept localhost traffic.
    var host = Process.GetProcessesByName("fiddler").Any() ? "ipv4.fiddler" : "localhost";
    var indexName = INDEX_PREFIX + suffix;

    var connectionSettings = new ConnectionSettings(new Uri("http://" + host + ":9200"))
        .SetDefaultIndex(indexName);
    var client = new ElasticClient(connectionSettings);

    var rootNodeInfo = client.RootNodeInfo();
    if (!rootNodeInfo.IsValid)
    {
        Console.Error.WriteLine("Could not connect to {0}:\r\n{1}", connectionSettings.Host, rootNodeInfo.ConnectionStatus.Error.OriginalException.Message);
        Console.Read();
        return;
    }

    // Remove any previous incarnation before creating a fresh one.
    client.DeleteIndex(indexName);

    var indexSettings = new IndexSettings
    {
        NumberOfReplicas = 1,
        NumberOfShards = 5
    };
    // "-1" disables periodic refresh entirely.
    indexSettings.Add("index.refresh_interval", "-1");

    var createResponse = client.CreateIndex(indexName, indexSettings);
    client.MapFromAttributes<Message>();
}
// Creates a throw-away index, flips two settings in-place via UpdateSettings,
// and verifies the values reported by GetIndexSettings afterwards.
public void UpdateSettingsSimple() {
    var index = Guid.NewGuid().ToString();
    var client = this._client;
    var settings = new IndexSettings();
    settings.NumberOfReplicas = 1;
    settings.NumberOfShards = 5;
    settings.Add("refresh_interval", "1s");
    settings.Add("search.slowlog.threshold.fetch.warn", "1s");
    client.CreateIndex(index, settings);
    // Mutate the same settings object and push the update to the live index.
    settings["refresh_interval"] = "-1";
    settings["search.slowlog.threshold.fetch.warn"] = "5s";
    var r = this._client.UpdateSettings(index, settings);
    Assert.True(r.IsValid);
    Assert.True(r.OK);
    var getResponse = this._client.GetIndexSettings(index);
    Assert.AreEqual(getResponse.Settings["refresh_interval"], "-1");
    // NOTE(review): the slowlog threshold was just updated to "5s" above, yet
    // this asserts the original "1s" — either the update is expected to be
    // ignored for this key or the expected value is stale. Confirm intent.
    Assert.AreEqual(getResponse.Settings["search.slowlog.threshold.fetch.warn"], "1s");
    this._client.DeleteIndex(index);
}
// Exhaustive round-trip test for complex index settings: three analyzers, two
// char filters, two token filters, two tokenizers and two custom similarity
// configurations (DFR and IB) are written via CreateIndex and then read back
// and asserted field-by-field through GetIndexSettings. The index is deleted
// at the end.
public void GetIndexSettingsComplex() { var index = Guid.NewGuid().ToString(); var settings = new IndexSettings(); settings.NumberOfReplicas = 4; settings.NumberOfShards = 8; settings.Analysis.Analyzers.Add("snowball", new SnowballAnalyzer { Language = "English" }); settings.Analysis.Analyzers.Add("standard", new StandardAnalyzer { StopWords = new[]{"word1", "word2"}}); settings.Analysis.Analyzers.Add("swedishlanguage", new LanguageAnalyzer(Language.Swedish) { StopWords = new[] { "word1", "word2" }, StemExclusionList = new[] { "stem1", "stem2" } }); settings.Analysis.CharFilters.Add("char1", new HtmlStripCharFilter()); settings.Analysis.CharFilters.Add("char2", new MappingCharFilter{ Mappings = new []{"ph=>f", "qu=>q"}}); settings.Analysis.TokenFilters.Add("tokenfilter1", new EdgeNGramTokenFilter()); settings.Analysis.TokenFilters.Add("tokenfilter2", new SnowballTokenFilter()); settings.Analysis.Tokenizers.Add("token1", new KeywordTokenizer()); settings.Analysis.Tokenizers.Add("token2", new PathHierarchyTokenizer()); settings.Similarity = new SimilaritySettings(); var dfr = new CustomSimilaritySettings("test1", "DFR"); dfr.SimilarityParameters.Add("basic_model", "g"); dfr.SimilarityParameters.Add("after_effect", "l"); dfr.SimilarityParameters.Add("normalization", "h2"); dfr.SimilarityParameters.Add("normalization.h2.c", 3); settings.Similarity.CustomSimilarities.Add(dfr); var ib = new CustomSimilaritySettings("test2", "IB"); ib.SimilarityParameters.Add("distribution", "spl"); ib.SimilarityParameters.Add("lambda", "ttf"); ib.SimilarityParameters.Add("normalization", "h1"); settings.Similarity.CustomSimilarities.Add(ib); var typeMapping = this._client.GetMapping(ElasticsearchConfiguration.DefaultIndex, "elasticsearchprojects"); typeMapping.TypeNameMarker = index; settings.Mappings.Add(typeMapping); settings.Add("merge.policy.merge_factor", "10"); var createResponse = this._client.CreateIndex(index, settings); var r = this._client.GetIndexSettings(index); 
// --- assert general settings plus the three analyzers (snowball / standard / swedish language) ---
Assert.True(r.IsValid); Assert.NotNull(r.Settings); Assert.AreEqual(r.Settings.NumberOfReplicas, 4); Assert.AreEqual(r.Settings.NumberOfShards, 8); Assert.Greater(r.Settings.Count(), 0); Assert.True(r.Settings.ContainsKey("merge.policy.merge_factor")); Assert.AreEqual("10", r.Settings["merge.policy.merge_factor"]); Assert.AreEqual(3, r.Settings.Analysis.Analyzers.Count); { // assert analyzers Assert.True(r.Settings.Analysis.Analyzers.ContainsKey("snowball")); var snoballAnalyser = r.Settings.Analysis.Analyzers["snowball"] as SnowballAnalyzer; Assert.NotNull(snoballAnalyser); Assert.AreEqual("English", snoballAnalyser.Language); Assert.True(r.Settings.Analysis.Analyzers.ContainsKey("standard")); var standardAnalyser = r.Settings.Analysis.Analyzers["standard"] as StandardAnalyzer; Assert.NotNull(standardAnalyser); Assert.NotNull(standardAnalyser.StopWords); Assert.AreEqual(2, standardAnalyser.StopWords.Count()); Assert.True(standardAnalyser.StopWords.Contains("word1")); Assert.True(standardAnalyser.StopWords.Contains("word2")); Assert.True(r.Settings.Analysis.Analyzers.ContainsKey("swedishlanguage")); var languageAnalyser = r.Settings.Analysis.Analyzers["swedishlanguage"] as LanguageAnalyzer; Assert.NotNull(languageAnalyser); Assert.AreEqual(Language.Swedish.ToString().ToLower(), languageAnalyser.Type); Assert.NotNull(languageAnalyser.StopWords); Assert.AreEqual(2, languageAnalyser.StopWords.Count()); Assert.True(languageAnalyser.StopWords.Contains("word1")); Assert.True(languageAnalyser.StopWords.Contains("word2")); Assert.AreEqual(2, languageAnalyser.StemExclusionList.Count()); Assert.True(languageAnalyser.StemExclusionList.Contains("stem1")); Assert.True(languageAnalyser.StemExclusionList.Contains("stem2")); } Assert.AreEqual(2, r.Settings.Analysis.CharFilters.Count); { // assert char filters Assert.True(r.Settings.Analysis.CharFilters.ContainsKey("char1")); var filter1 = r.Settings.Analysis.CharFilters["char1"] as HtmlStripCharFilter; Assert.NotNull(filter1); 
// --- assert char filters, token filters, tokenizers and the DFR similarity parameters ---
Assert.True(r.Settings.Analysis.CharFilters.ContainsKey("char2")); var filter2 = r.Settings.Analysis.CharFilters["char2"] as MappingCharFilter; Assert.NotNull(filter2); Assert.AreEqual(2, filter2.Mappings.Count()); Assert.True(filter2.Mappings.Contains("ph=>f")); Assert.True(filter2.Mappings.Contains("qu=>q")); } Assert.AreEqual(2, r.Settings.Analysis.TokenFilters.Count); { // assert token filters Assert.True(r.Settings.Analysis.TokenFilters.ContainsKey("tokenfilter1")); var filter1 = r.Settings.Analysis.TokenFilters["tokenfilter1"] as EdgeNGramTokenFilter; Assert.NotNull(filter1); Assert.True(r.Settings.Analysis.TokenFilters.ContainsKey("tokenfilter2")); var filter2 = r.Settings.Analysis.TokenFilters["tokenfilter2"] as SnowballTokenFilter; Assert.NotNull(filter2); } Assert.AreEqual(2, r.Settings.Analysis.Tokenizers.Count); { // assert tokenizers Assert.True(r.Settings.Analysis.Tokenizers.ContainsKey("token1")); var tokenizer1 = r.Settings.Analysis.Tokenizers["token1"] as KeywordTokenizer; Assert.NotNull(tokenizer1); Assert.True(r.Settings.Analysis.Tokenizers.ContainsKey("token2")); var tokenizer2 = r.Settings.Analysis.Tokenizers["token2"] as PathHierarchyTokenizer; Assert.NotNull(tokenizer2); } Assert.NotNull(r.Settings.Similarity); Assert.NotNull(r.Settings.Similarity.CustomSimilarities); Assert.AreEqual(2, r.Settings.Similarity.CustomSimilarities.Count); { // assert similarity var similarity1 = r.Settings.Similarity.CustomSimilarities.FirstOrDefault(x => x.Name.Equals("test1", StringComparison.InvariantCultureIgnoreCase)); Assert.NotNull(similarity1); Assert.AreEqual("DFR", similarity1.Type); Assert.AreEqual(4, similarity1.SimilarityParameters.Count); Assert.True(similarity1.SimilarityParameters.Any(x => x.Key.Equals("basic_model") && x.Value.ToString().Equals("g"))); Assert.True(similarity1.SimilarityParameters.Any(x => x.Key.Equals("after_effect") && x.Value.ToString().Equals("l"))); Assert.True(similarity1.SimilarityParameters.Any(x => 
x.Key.Equals("normalization") && x.Value.ToString().Equals("h2"))); Assert.True(similarity1.SimilarityParameters.Any(x => x.Key.Equals("normalization.h2.c") && x.Value.ToString().Equals("3"))); var similarity2 = r.Settings.Similarity.CustomSimilarities.FirstOrDefault(x => x.Name.Equals("test2", StringComparison.InvariantCultureIgnoreCase)); Assert.NotNull(similarity2); Assert.AreEqual("IB", similarity2.Type); Assert.AreEqual(3, similarity2.SimilarityParameters.Count); Assert.True(similarity2.SimilarityParameters.Any(x => x.Key.Equals("distribution") && x.Value.ToString().Equals("spl"))); Assert.True(similarity2.SimilarityParameters.Any(x => x.Key.Equals("lambda") && x.Value.ToString().Equals("ttf"))); Assert.True(similarity2.SimilarityParameters.Any(x => x.Key.Equals("normalization") && x.Value.ToString().Equals("h1"))); } this._client.DeleteIndex(index); }
// Exhaustive round-trip test for complex index settings (duplicate variant of
// the test above): analyzers, char filters, token filters, tokenizers and two
// custom similarity configurations (DFR and IB) are written via CreateIndex
// and then read back and asserted field-by-field through GetIndexSettings.
// The index is deleted at the end.
public void GetIndexSettingsComplex() { var index = Guid.NewGuid().ToString(); var settings = new IndexSettings(); settings.NumberOfReplicas = 4; settings.NumberOfShards = 8; settings.Analysis.Analyzers.Add("snowball", new SnowballAnalyzer { Language = "English" }); settings.Analysis.Analyzers.Add("standard", new StandardAnalyzer { StopWords = new[] { "word1", "word2" } }); settings.Analysis.Analyzers.Add("swedishlanguage", new LanguageAnalyzer(Language.Swedish) { StopWords = new[] { "word1", "word2" }, StemExclusionList = new[] { "stem1", "stem2" } }); settings.Analysis.CharFilters.Add("char1", new HtmlStripCharFilter()); settings.Analysis.CharFilters.Add("char2", new MappingCharFilter { Mappings = new [] { "ph=>f", "qu=>q" } }); settings.Analysis.TokenFilters.Add("tokenfilter1", new EdgeNGramTokenFilter()); settings.Analysis.TokenFilters.Add("tokenfilter2", new SnowballTokenFilter()); settings.Analysis.Tokenizers.Add("token1", new KeywordTokenizer()); settings.Analysis.Tokenizers.Add("token2", new PathHierarchyTokenizer()); settings.Similarity = new SimilaritySettings(); var dfr = new CustomSimilaritySettings("test1", "DFR"); dfr.SimilarityParameters.Add("basic_model", "g"); dfr.SimilarityParameters.Add("after_effect", "l"); dfr.SimilarityParameters.Add("normalization", "h2"); dfr.SimilarityParameters.Add("normalization.h2.c", 3); settings.Similarity.CustomSimilarities.Add(dfr); var ib = new CustomSimilaritySettings("test2", "IB"); ib.SimilarityParameters.Add("distribution", "spl"); ib.SimilarityParameters.Add("lambda", "ttf"); ib.SimilarityParameters.Add("normalization", "h1"); settings.Similarity.CustomSimilarities.Add(ib); var typeMapping = this._client.GetMapping(ElasticsearchConfiguration.DefaultIndex, "elasticsearchprojects"); typeMapping.TypeNameMarker = index; settings.Mappings.Add(typeMapping); settings.Add("merge.policy.merge_factor", "10"); var createResponse = this._client.CreateIndex(index, settings); var r = this._client.GetIndexSettings(index); 
// --- assert general settings plus the three analyzers (snowball / standard / swedish language) ---
Assert.True(r.IsValid); Assert.NotNull(r.Settings); Assert.AreEqual(r.Settings.NumberOfReplicas, 4); Assert.AreEqual(r.Settings.NumberOfShards, 8); Assert.Greater(r.Settings.Count(), 0); Assert.True(r.Settings.ContainsKey("merge.policy.merge_factor")); Assert.AreEqual("10", r.Settings["merge.policy.merge_factor"]); Assert.AreEqual(3, r.Settings.Analysis.Analyzers.Count); { // assert analyzers Assert.True(r.Settings.Analysis.Analyzers.ContainsKey("snowball")); var snoballAnalyser = r.Settings.Analysis.Analyzers["snowball"] as SnowballAnalyzer; Assert.NotNull(snoballAnalyser); Assert.AreEqual("English", snoballAnalyser.Language); Assert.True(r.Settings.Analysis.Analyzers.ContainsKey("standard")); var standardAnalyser = r.Settings.Analysis.Analyzers["standard"] as StandardAnalyzer; Assert.NotNull(standardAnalyser); Assert.NotNull(standardAnalyser.StopWords); Assert.AreEqual(2, standardAnalyser.StopWords.Count()); Assert.True(standardAnalyser.StopWords.Contains("word1")); Assert.True(standardAnalyser.StopWords.Contains("word2")); Assert.True(r.Settings.Analysis.Analyzers.ContainsKey("swedishlanguage")); var languageAnalyser = r.Settings.Analysis.Analyzers["swedishlanguage"] as LanguageAnalyzer; Assert.NotNull(languageAnalyser); Assert.AreEqual(Language.Swedish.ToString().ToLower(), languageAnalyser.Type); Assert.NotNull(languageAnalyser.StopWords); Assert.AreEqual(2, languageAnalyser.StopWords.Count()); Assert.True(languageAnalyser.StopWords.Contains("word1")); Assert.True(languageAnalyser.StopWords.Contains("word2")); Assert.AreEqual(2, languageAnalyser.StemExclusionList.Count()); Assert.True(languageAnalyser.StemExclusionList.Contains("stem1")); Assert.True(languageAnalyser.StemExclusionList.Contains("stem2")); } Assert.AreEqual(2, r.Settings.Analysis.CharFilters.Count); { // assert char filters Assert.True(r.Settings.Analysis.CharFilters.ContainsKey("char1")); var filter1 = r.Settings.Analysis.CharFilters["char1"] as HtmlStripCharFilter; Assert.NotNull(filter1); 
// --- assert char filters, token filters, tokenizers and the DFR similarity parameters ---
Assert.True(r.Settings.Analysis.CharFilters.ContainsKey("char2")); var filter2 = r.Settings.Analysis.CharFilters["char2"] as MappingCharFilter; Assert.NotNull(filter2); Assert.AreEqual(2, filter2.Mappings.Count()); Assert.True(filter2.Mappings.Contains("ph=>f")); Assert.True(filter2.Mappings.Contains("qu=>q")); } Assert.AreEqual(2, r.Settings.Analysis.TokenFilters.Count); { // assert token filters Assert.True(r.Settings.Analysis.TokenFilters.ContainsKey("tokenfilter1")); var filter1 = r.Settings.Analysis.TokenFilters["tokenfilter1"] as EdgeNGramTokenFilter; Assert.NotNull(filter1); Assert.True(r.Settings.Analysis.TokenFilters.ContainsKey("tokenfilter2")); var filter2 = r.Settings.Analysis.TokenFilters["tokenfilter2"] as SnowballTokenFilter; Assert.NotNull(filter2); } Assert.AreEqual(2, r.Settings.Analysis.Tokenizers.Count); { // assert tokenizers Assert.True(r.Settings.Analysis.Tokenizers.ContainsKey("token1")); var tokenizer1 = r.Settings.Analysis.Tokenizers["token1"] as KeywordTokenizer; Assert.NotNull(tokenizer1); Assert.True(r.Settings.Analysis.Tokenizers.ContainsKey("token2")); var tokenizer2 = r.Settings.Analysis.Tokenizers["token2"] as PathHierarchyTokenizer; Assert.NotNull(tokenizer2); } Assert.NotNull(r.Settings.Similarity); Assert.NotNull(r.Settings.Similarity.CustomSimilarities); Assert.AreEqual(2, r.Settings.Similarity.CustomSimilarities.Count); { // assert similarity var similarity1 = r.Settings.Similarity.CustomSimilarities.FirstOrDefault(x => x.Name.Equals("test1", StringComparison.InvariantCultureIgnoreCase)); Assert.NotNull(similarity1); Assert.AreEqual("DFR", similarity1.Type); Assert.AreEqual(4, similarity1.SimilarityParameters.Count); Assert.True(similarity1.SimilarityParameters.Any(x => x.Key.Equals("basic_model") && x.Value.ToString().Equals("g"))); Assert.True(similarity1.SimilarityParameters.Any(x => x.Key.Equals("after_effect") && x.Value.ToString().Equals("l"))); Assert.True(similarity1.SimilarityParameters.Any(x => 
x.Key.Equals("normalization") && x.Value.ToString().Equals("h2"))); Assert.True(similarity1.SimilarityParameters.Any(x => x.Key.Equals("normalization.h2.c") && x.Value.ToString().Equals("3"))); var similarity2 = r.Settings.Similarity.CustomSimilarities.FirstOrDefault(x => x.Name.Equals("test2", StringComparison.InvariantCultureIgnoreCase)); Assert.NotNull(similarity2); Assert.AreEqual("IB", similarity2.Type); Assert.AreEqual(3, similarity2.SimilarityParameters.Count); Assert.True(similarity2.SimilarityParameters.Any(x => x.Key.Equals("distribution") && x.Value.ToString().Equals("spl"))); Assert.True(similarity2.SimilarityParameters.Any(x => x.Key.Equals("lambda") && x.Value.ToString().Equals("ttf"))); Assert.True(similarity2.SimilarityParameters.Any(x => x.Key.Equals("normalization") && x.Value.ToString().Equals("h1"))); } this._client.DeleteIndex(index); }
// Ensures the SitePack index exists with the expected settings and mapping.
// When the index is missing it is created with the max_result_window setting,
// the sitepack analysis and the SitePackChannel mapping. When it exists but
// lacks max_result_window, the index is closed, the settings are applied via
// UpdateIndexSettings, the mapping is re-put, and the index is reopened.
//
// NOTE(review): the existence check uses _config.SitePackIndex while creation
// and all subsequent operations use the indexName parameter — if callers ever
// pass a name different from _config.SitePackIndex these diverge. Confirm the
// two are always the same, or switch the check to indexName.
//
// NOTE(review): responseOpen (and responseSetting's success path) are not
// checked for validity; failures there are silent apart from logging above.
private async Task MappingSitePackConfig(string indexName) { if (!Client.Value.IndexExists(_config.SitePackIndex).Exists) { _logger.LogInformation($"Index {indexName} doesn't exist so will be created"); var response = Client.Value.CreateIndex(indexName, c => c .Settings(s => s .Setting(nameof(max_result_window), max_result_window) .Analysis(_sitepackAnalysis)) .Mappings(m => m.Map <SitePackChannel>(x => x .Properties(p => p.Keyword(t => t.Name(pt => pt.MediaType)) .Keyword(t => t.Name(pt => pt.Site)) .Keyword(t => t.Name(pt => pt.Source)) .Keyword(t => t.Name(pt => pt.Site_id)) .Keyword(t => t.Name(pt => pt.Xmltv_id)) .Date(t => t.Name(pt => pt.Update)) .Keyword(t => t.Name(pt => pt.Country)) .Text(t => t .Name(pt => pt.DisplayNames) .Fields(f => f.Keyword(k => k.Name(keywordProperty))) .Analyzer("sitepack_name_analyzer") .SearchAnalyzer("sitepack_name_analyzer")) )) )); _logger.LogDebug(response.DebugInformation); } else { var index = await Client.Value.GetIndexAsync(indexName); if (!index.Indices[indexName].Settings.ContainsKey(nameof(max_result_window))) { var responseClose = await Client.Value.CloseIndexAsync(indexName); if (responseClose.IsValid) { _logger.LogInformation($"Applying settings for index : {indexName}"); var indexSettings = new IndexSettings() { Analysis = _sitepackAnalysis(new AnalysisDescriptor()) }; indexSettings.Add(nameof(max_result_window), max_result_window); var responseSetting = await Client.Value.UpdateIndexSettingsAsync(new UpdateIndexSettingsRequest(indexName) { IndexSettings = indexSettings }); if (!responseSetting.IsValid) { _logger.LogError($"{responseSetting.ServerError.Error.ToString()}"); if (responseSetting.TryGetServerErrorReason(out string reason)) { _logger.LogError($"{reason}"); } } await Client.Value.MapAsync <SitePackChannel>(x => x .Properties(p => p.Keyword(t => t.Name(pt => pt.MediaType)) .Keyword(t => t.Name(pt => pt.Site)) .Keyword(t => t.Name(pt => pt.Source)) .Keyword(t => t.Name(pt => pt.Site_id)) .Keyword(t => 
t.Name(pt => pt.Xmltv_id)) .Date(t => t.Name(pt => pt.Update)) .Keyword(t => t.Name(pt => pt.Country)) .Text(t => t .Name(pt => pt.DisplayNames) .Fields(f => f.Keyword(k => k.Name(keywordProperty))) .Analyzer("sitepack_name_analyzer") .SearchAnalyzer("sitepack_name_analyzer")) )); var responseOpen = await Client.Value.OpenIndexAsync(indexName); } } } }
/// <summary>
/// Creates an index combining typed settings, raw settings, an analyzer and a
/// mapping copied from an existing type, then round-trips them through
/// GetIndexSettings and cleans up the index.
/// </summary>
public void GetIndexSettingsComplex()
{
    var indexName = Guid.NewGuid().ToString();

    var indexSettings = new IndexSettings
    {
        NumberOfReplicas = 4,
        NumberOfShards = 8
    };
    indexSettings.Analysis.Analyzers.Add("snowball", new SnowballAnalyzer { Language = "English" });

    // Reuse the mapping of an existing type, renamed for the new index.
    var typeMapping = this._client.GetMapping(ElasticsearchConfiguration.DefaultIndex, "elasticsearchprojects");
    typeMapping.Name = indexName;
    indexSettings.Mappings.Add(typeMapping);
    indexSettings.Add("merge.policy.merge_factor", "10");

    var createResponse = this._client.CreateIndex(indexName, indexSettings);
    var settingsResponse = this._client.GetIndexSettings(indexName);

    Assert.True(settingsResponse.IsValid);
    Assert.NotNull(settingsResponse.Settings);
    Assert.AreEqual(settingsResponse.Settings.NumberOfReplicas, 4);
    Assert.AreEqual(settingsResponse.Settings.NumberOfShards, 8);
    Assert.Greater(settingsResponse.Settings.Count(), 0);
    Assert.True(settingsResponse.Settings.ContainsKey("merge.policy.merge_factor"));

    this._client.DeleteIndex(indexName);
}
// Builds a CreateIndexDescriptor for the given index name and object type:
// registers custom index/search analyzers, default shard/replica counts, raw
// field-limit settings, n-gram and word-delimiter token filters, and (for
// ESLogRecord only) an auto-mapped type mapping. Returns null for any other
// objectType.
//
// NOTE(review): CustomAnalyzers.Add(...) with fixed keys will throw on a
// duplicate key if this method is invoked more than once on the same instance
// — confirm the lifetime/reset of the CustomAnalyzers collection.
//
// NOTE(review): the filter name "string_delimeter" is misspelled but used
// consistently (registration and analyzer filter list); renaming it would
// change the index definition, so it is left as-is.
private CreateIndexDescriptor GetCreateIndexDescriptor(string indexName, Type objectType) { #region [ Default analyzers and filters ] // Add custom index analyzers CustomAnalyzers.Add("full_string_index_analyzer", new CustomAnalyzer { Tokenizer = "standard", Filter = new List <string> { "standard", "string_delimeter", "stop", "asciifolding", "string_ngrams", "lowercase" } }); CustomAnalyzers.Add("full_keyword_index_analyzer", new CustomAnalyzer { Tokenizer = "keyword", Filter = new List <string> { "standard", "stop", "asciifolding" } }); // Add custom search analyzers CustomAnalyzers.Add("full_string_search_analyzer", new CustomAnalyzer { Tokenizer = "standard", Filter = new List <string> { "standard", "stop", "asciifolding", "lowercase" } }); #endregion // Create a default descriptor CreateIndexDescriptor descriptor = null; // Create default settings var settings = new IndexSettings() { NumberOfReplicas = 1, NumberOfShards = 2 }; // Add additional settings settings.Analysis = new Analysis(); settings.Analysis.TokenFilters = new TokenFilters(); settings.Analysis.Analyzers = new Analyzers(); //settings.Add("index.mapping.single_type", false); settings.Add("index.mapping.total_fields.limit", 2000); settings.Add("index.mapping.nested_fields.limit", 500); settings.Add("index.max_docvalue_fields_search", 500); // Create token filters var stringNGramsTokenFilter = new EdgeNGramTokenFilter { MinGram = 2, MaxGram = 20 }; var stringDelimiterTokenFilter = new WordDelimiterTokenFilter { GenerateWordParts = true, CatenateAll = true, CatenateNumbers = true, CatenateWords = true, SplitOnCaseChange = true, SplitOnNumerics = true, PreserveOriginal = true }; // Add filters settings.Analysis.TokenFilters.Add("string_ngrams", stringNGramsTokenFilter); settings.Analysis.TokenFilters.Add("string_delimeter", stringDelimiterTokenFilter); // Add analyzers CustomAnalyzers.ToList().ForEach(a => { settings.Analysis.Analyzers.Add(a.Key, a.Value); }); // Create the config var indexConfig = new 
IndexState { Settings = settings }; #region [ LogRecord Mapping ] // Fill the descriptor according to the type if (objectType == typeof(ESLogRecord)) { descriptor = new CreateIndexDescriptor(indexName) .InitializeUsing(indexConfig) .Mappings(ms => ms.Map <ESLogRecord>(m => m.AutoMap())); } #endregion return(descriptor); }