Example #1
0
 /// <summary>
 /// Builds the basic index settings (shard count, replica count and the
 /// garbage-collection window for deleted documents) for indices that do
 /// NOT use the concatenate/autocomplete analysis chain.
 /// </summary>
 /// <param name="setting">The settings descriptor to configure.</param>
 /// <returns>The configured settings promise.</returns>
 public IPromise <IIndexSettings> ConfigureNonConcatenateAndAutocompleteSettings(IndexSettingsDescriptor setting)
 {
     var configured = setting
         .NumberOfShards(_numberOfShards)
         .NumberOfReplicas(_numberOfReplicas)
         // Retain deleted documents for one hour before they are GC'd.
         .Setting("index.gc_deletes", "1h");

     return configured;
 }
Example #2
0
 /// <summary>
 /// Builds the index settings for indices that use the concatenate and
 /// autocomplete analysis chain: shard/replica counts, the custom analysis,
 /// the maximum ngram length difference and the deleted-document GC window.
 /// </summary>
 /// <param name="setting">The settings descriptor to configure.</param>
 /// <returns>The configured settings promise.</returns>
 public IPromise<IIndexSettings> ConfigureConcatenateAndAutocompleteSettings(IndexSettingsDescriptor setting)
 {
     return setting
            .NumberOfShards(_numberOfShards)
            .NumberOfReplicas(_numberOfReplicas)
            // Method group instead of a redundant pass-through lambda.
            .Analysis(ConfigureConcatenateAndAutocompleteAnalysis)
            // Lift the default min/max ngram restriction so wide ngram
            // ranges used by autocomplete are accepted by the index.
            .Setting("index.max_ngram_diff", int.MaxValue)
            .Setting("index.gc_deletes", "1h"); // 1 hour
 }
Example #3
0
        /// <summary>
        /// Builds the index settings: one shard, no replicas, and a custom
        /// English analysis chain (HTML stripping, lowercasing, English stop
        /// words, Snowball stemming and ASCII folding).
        /// </summary>
        /// <returns>The populated <see cref="IndexSettingsDescriptor"/>.</returns>
        private static IndexSettingsDescriptor GetIndexSettings()
        {
            #region TokenFilters

            // English stop-word and Snowball stemmer filters.
            // (The original comment said "danish", but the configuration below
            // is clearly English: "_english_" stop words, English Snowball.)
            var tokenFilters = new Dictionary<string, ITokenFilter>
            {
                ["english_stop"] = new StopTokenFilter
                {
                    StopWords = "_english_"
                },
                ["english_snow"] = new SnowballTokenFilter
                {
                    Language = SnowballLanguage.English
                }
            };

            #endregion

            #region Analyzers

            // Custom analyzers for the index; "default" is applied to fields
            // that do not specify an analyzer explicitly.
            var analyzers = new Dictionary<string, IAnalyzer>
            {
                ["english"] = new CustomAnalyzer
                {
                    CharFilter = new[] { "html_strip" },
                    Filter     = new[] { "lowercase", "english_stop", "english_snow", "asciifolding" },
                    Tokenizer  = "standard"
                },
                ["default"] = new CustomAnalyzer
                {
                    CharFilter = new[] { "html_strip" },
                    Filter     = new[] { "lowercase", "asciifolding" },
                    Tokenizer  = "standard"
                }
            };

            #endregion

            var indexSettings = new IndexSettingsDescriptor();

            indexSettings.NumberOfShards(1);
            indexSettings.NumberOfReplicas(0);
            // The descriptor parameter is unused (discarded): the Analysis
            // object is built directly from the dictionaries above.
            indexSettings.Analysis(_ => new Analysis
            {
                TokenFilters = new TokenFilters(tokenFilters),
                Analyzers    = new Analyzers(analyzers)
            });

            return indexSettings;
        }