/// <summary>
 /// An analyzer of type simple that is built using a Lower Case Tokenizer.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/simple-analyzer.html
 /// </summary>
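 /// <example>
 /// A minimal usage sketch; the analyzerSettings instance and the @default enum member are assumptions, not confirmed API:
 /// <code>
 /// // register the simple analyzer under the "default" alias, keeping its defaults
 /// analyzerSettings.Simple(AnalyzersDefaultAliases.@default);
 /// </code>
 /// </example>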
 public AnalyzerSettings Simple(AnalyzersDefaultAliases name, Func<SimpleAnalyzer, SimpleAnalyzer> simple = null)
 {
     return Simple(name.AsString(), simple);
 }
 /// <summary>
 /// An analyzer of type whitespace that is built using a Whitespace Tokenizer.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/whitespace-analyzer.html
 /// </summary>
 public AnalyzerSettings Whitespace(AnalyzersDefaultAliases name, Func<WhitespaceAnalyzer, WhitespaceAnalyzer> whitespace = null)
 {
     return Whitespace(name.AsString(), whitespace);
 }
 /// <summary>
 /// An analyzer of type standard that is built using the
 /// Standard Tokenizer, with the Standard Token Filter, Lower Case Token Filter, and Stop Token Filter.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/standard-analyzer.html
 /// </summary>
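 /// <example>
 /// A minimal sketch; the analyzerSettings instance, the @default enum member, and the StopWords setter are assumptions:
 /// <code>
 /// // override the "default" alias with a standard analyzer using custom stopwords
 /// analyzerSettings.Standard(AnalyzersDefaultAliases.@default,
 ///     std => std.StopWords("foo", "bar"));
 /// </code>
 /// </example>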
 public AnalyzerSettings Standard(AnalyzersDefaultAliases name, Func<StandardAnalyzer, StandardAnalyzer> standard = null)
 {
     return Standard(name.AsString(), standard);
 }
 /// <summary>
 /// An analyzer of type custom that allows combining a Tokenizer with zero or more Token Filters, and zero or more Char Filters.
 /// The custom analyzer accepts a logical/registered name of the tokenizer to use, and a list of logical/registered names of token filters.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/custom-analyzer.html
 /// </summary>
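 /// <example>
 /// A minimal sketch; the analyzerSettings instance and the Tokenizer/Filter setters are assumptions:
 /// <code>
 /// // combine a standard tokenizer with lowercase and stop token filters
 /// analyzerSettings.Custom(AnalyzersDefaultAliases.@default,
 ///     c => c.Tokenizer("standard").Filter("lowercase", "stop"));
 /// </code>
 /// </example>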
 public AnalyzerSettings Custom(AnalyzersDefaultAliases name, Func<CustomAnalyzer, CustomAnalyzer> custom)
 {
     return Custom(name.AsString(), custom);
 }
 /// <summary>
 /// An analyzer of type snowball that uses the standard tokenizer, with standard filter, lowercase filter, stop filter, and snowball filter.
 /// The Snowball Analyzer is a stemming analyzer from Lucene, originally based on the Snowball project from snowball.tartarus.org.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/snowball-analyzer.html
 /// </summary>
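 /// <example>
 /// A minimal sketch; the analyzerSettings instance and the Language setter are assumptions:
 /// <code>
 /// // register a snowball analyzer stemming English text
 /// analyzerSettings.Snowball(AnalyzersDefaultAliases.@default,
 ///     sb => sb.Language("English"));
 /// </code>
 /// </example>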
 public AnalyzerSettings Snowball(AnalyzersDefaultAliases name, Func<SnowballAnalyzer, SnowballAnalyzer> snowball = null)
 {
     return Snowball(name.AsString(), snowball);
 }
 /// <summary>
 /// A set of analyzers aimed at analyzing text in a specific language.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/lang-analyzer.html
 /// </summary>
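 /// <example>
 /// A minimal sketch; the analyzerSettings instance and the Type setter are assumptions:
 /// <code>
 /// // register a French language analyzer under the "default" alias
 /// analyzerSettings.Language(AnalyzersDefaultAliases.@default,
 ///     lang => lang.Type("french"));
 /// </code>
 /// </example>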
 public AnalyzerSettings Language(AnalyzersDefaultAliases name, Func<LanguageAnalyzer, LanguageAnalyzer> language)
 {
     return Language(name.AsString(), language);
 }
 /// <summary>
 /// An analyzer of type pattern that can flexibly separate text into terms via a regular expression.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/pattern-analyzer.html
 /// </summary>
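 /// <example>
 /// A minimal sketch; the analyzerSettings instance and the Pattern setter are assumptions:
 /// <code>
 /// // split text into terms on any run of non-word characters
 /// analyzerSettings.Pattern(AnalyzersDefaultAliases.@default,
 ///     p => p.Pattern(@"\W+"));
 /// </code>
 /// </example>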
 public AnalyzerSettings Pattern(AnalyzersDefaultAliases name, Func<PatternAnalyzer, PatternAnalyzer> pattern = null)
 {
     return Pattern(name.AsString(), pattern);
 }
 /// <summary>
 /// An analyzer of type keyword that “tokenizes” an entire stream as a single token.
 /// This is useful for data like zip codes, ids, and so on.
 /// Note that when using mapping definitions, it makes more sense to simply mark the field as not_analyzed.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/keyword-analyzer.html
 /// </summary>
 public AnalyzerSettings Keyword(AnalyzersDefaultAliases name, Func<KeywordAnalyzer, KeywordAnalyzer> keyword = null)
 {
     return Keyword(name.AsString(), keyword);
 }
 /// <summary>
 /// An analyzer of type stop that is built using a Lower Case Tokenizer, with a Stop Token Filter.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/stop-analyzer.html
 /// </summary>
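 /// <example>
 /// A minimal sketch; the analyzerSettings instance and the StopWords setter are assumptions:
 /// <code>
 /// // register a stop analyzer with a custom stopword list
 /// analyzerSettings.Stop(AnalyzersDefaultAliases.@default,
 ///     s => s.StopWords("a", "an", "the"));
 /// </code>
 /// </example>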
 public AnalyzerSettings Stop(AnalyzersDefaultAliases name, Func<StopAnalyzer, StopAnalyzer> stop = null)
 {
     return Stop(name.AsString(), stop);
 }