bool createIndex(string indexName, ElasticClient client)
{
    // Drop any existing index with the same name so it can be rebuilt from scratch.
    if (client.IndexExists(i => i.Index(indexName)).Exists)
    {
        var response = client.DeleteIndex(i => i.Index(indexName));
        Logger.Current.Verbose("Deleted index. " + response.ConnectionStatus);
    }

    // Token filter that keeps the original token plus a fully catenated form,
    // without generating word/number parts or splitting on case changes or numerics.
    var word_delimiter_filter = new WordDelimiterTokenFilter
    {
        CatenateAll = true,
        GenerateNumberParts = false,
        SplitOnCaseChange = false,
        GenerateWordParts = false,
        SplitOnNumerics = false,
        PreserveOriginal = true
    };

    // Whitespace-tokenized analyzer that lowercases and then applies the filter above.
    var customAnalyzer = new CustomAnalyzer
    {
        Filter = new List<string> { "lowercase", "word_delimiter_filter" },
        Tokenizer = "whitespace"
    };

    // Register the filter and analyzer, and map the Tag type in one request.
    var createResult = client.CreateIndex(indexName, index => index
        .Analysis(a => a
            .TokenFilters(t => t.Add("word_delimiter_filter", word_delimiter_filter))
            .Analyzers(an => an.Add("custom", customAnalyzer)))
        .AddMapping<Tag>(tmd => MapTagCompletionFields(tmd)));

    return createResult.ConnectionStatus.Success;
}
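For context, a minimal usage sketch for the method above. It assumes the NEST 1.x API family that the snippet itself uses (IndexExists/DeleteIndex/ConnectionStatus); the node URI and index name are placeholders, not values from the original code.

// Usage sketch, assuming NEST 1.x. "http://localhost:9200" and "tags" are placeholders.
var connectionSettings = new ConnectionSettings(new Uri("http://localhost:9200"));
var client = new ElasticClient(connectionSettings);

if (createIndex("tags", client))
{
    Logger.Current.Verbose("Index 'tags' created with the custom whitespace analyzer.");
}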
private CreateIndexDescriptor GetCreateIndexDescriptor(string indexName, Type objectType)
{
    #region [ Default analyzers and filters ]

    // Add custom index analyzers
    CustomAnalyzers.Add("full_string_index_analyzer", new CustomAnalyzer
    {
        Tokenizer = "standard",
        Filter = new List<string> { "standard", "string_delimeter", "stop", "asciifolding", "string_ngrams", "lowercase" }
    });
    CustomAnalyzers.Add("full_keyword_index_analyzer", new CustomAnalyzer
    {
        Tokenizer = "keyword",
        Filter = new List<string> { "standard", "stop", "asciifolding" }
    });

    // Add custom search analyzers
    CustomAnalyzers.Add("full_string_search_analyzer", new CustomAnalyzer
    {
        Tokenizer = "standard",
        Filter = new List<string> { "standard", "stop", "asciifolding", "lowercase" }
    });

    #endregion

    // Create a default descriptor
    CreateIndexDescriptor descriptor = null;

    // Create default settings
    var settings = new IndexSettings
    {
        NumberOfReplicas = 1,
        NumberOfShards = 2
    };

    // Add additional settings
    settings.Analysis = new Analysis();
    settings.Analysis.TokenFilters = new TokenFilters();
    settings.Analysis.Analyzers = new Analyzers();
    //settings.Add("index.mapping.single_type", false);
    settings.Add("index.mapping.total_fields.limit", 2000);
    settings.Add("index.mapping.nested_fields.limit", 500);
    settings.Add("index.max_docvalue_fields_search", 500);

    // Create token filters
    var stringNGramsTokenFilter = new EdgeNGramTokenFilter
    {
        MinGram = 2,
        MaxGram = 20
    };
    var stringDelimiterTokenFilter = new WordDelimiterTokenFilter
    {
        GenerateWordParts = true,
        CatenateAll = true,
        CatenateNumbers = true,
        CatenateWords = true,
        SplitOnCaseChange = true,
        SplitOnNumerics = true,
        PreserveOriginal = true
    };

    // Add filters
    settings.Analysis.TokenFilters.Add("string_ngrams", stringNGramsTokenFilter);
    settings.Analysis.TokenFilters.Add("string_delimeter", stringDelimiterTokenFilter);

    // Add analyzers
    CustomAnalyzers.ToList().ForEach(a => settings.Analysis.Analyzers.Add(a.Key, a.Value));

    // Create the config
    var indexConfig = new IndexState
    {
        Settings = settings
    };

    #region [ LogRecord Mapping ]

    // Fill the descriptor according to the type
    if (objectType == typeof(ESLogRecord))
    {
        descriptor = new CreateIndexDescriptor(indexName)
            .InitializeUsing(indexConfig)
            .Mappings(ms => ms.Map<ESLogRecord>(m => m.AutoMap()));
    }

    #endregion

    return descriptor;
}
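A sketch of how the returned descriptor might be consumed. This assumes the NEST 2.x/5.x-era API the method itself uses (Mappings/AutoMap), where a CreateIndexDescriptor can be passed to CreateIndex as a request; the client construction and index name are placeholders.

// Usage sketch, assuming a NEST 2.x/5.x-era client. URI and index name are placeholders.
var client = new ElasticClient(new ConnectionSettings(new Uri("http://localhost:9200")));

// Note: the method returns null for unmapped types, so check before use.
var descriptor = GetCreateIndexDescriptor("logrecords", typeof(ESLogRecord));
if (descriptor != null)
{
    var response = client.CreateIndex(descriptor);
    if (!response.IsValid)
    {
        throw new Exception("Index creation failed: " + response.DebugInformation);
    }
}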