/// <summary> Initializes a new instance of CustomAnalyzer. </summary>
internal CustomAnalyzer(string oDataType, string name, LexicalTokenizerName tokenizerName, IList<TokenFilterName> tokenFilters, IList<string> charFilters) : base(oDataType, name)
{
    TokenizerName = tokenizerName;
    TokenFilters = tokenFilters;
    CharFilters = charFilters;
    ODataType = oDataType ?? "#Microsoft.Azure.Search.CustomAnalyzer";
}
/// <summary> Initializes a new instance of CustomAnalyzer. </summary>
/// <param name="name"> The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. </param>
/// <param name="tokenizerName"> The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. </param>
/// <exception cref="ArgumentNullException"><paramref name="name"/> is null.</exception>
public CustomAnalyzer(string name, LexicalTokenizerName tokenizerName) : base(name)
{
    TokenizerName = tokenizerName;
    TokenFilters = new List<TokenFilterName>();
    CharFilters = new List<string>();
    ODataType = "#Microsoft.Azure.Search.CustomAnalyzer";
}
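// Sketch (illustrative, not part of the generated code): one way to build a CustomAnalyzer
// with the public constructor above and then add filters. The analyzer name, the choice of
// tokenizer, and the "html_strip" char filter value are assumptions for demonstration only.
internal static CustomAnalyzer CreateExampleAnalyzer()
{
    var analyzer = new CustomAnalyzer("my-analyzer", LexicalTokenizerName.Standard);
    analyzer.TokenFilters.Add(TokenFilterName.Lowercase); // token filters run after tokenization
    analyzer.CharFilters.Add("html_strip");               // char filters run before tokenization
    return analyzer;
}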
/// <summary>
/// Initializes a new instance of AnalyzeTextOptions.
/// </summary>
/// <param name="text"> Required text to break into tokens. </param>
/// <param name="tokenizerName"> The name of the tokenizer to use to break the given <paramref name="text"/>. </param>
/// <exception cref="ArgumentNullException"><paramref name="text"/> is null.</exception>
public AnalyzeTextOptions(string text, LexicalTokenizerName tokenizerName)
{
    Text = text ?? throw new ArgumentNullException(nameof(text));
    TokenizerName = tokenizerName;
    TokenFilters = new List<TokenFilterName>();
    CharFilters = new List<string>();
}
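// Sketch (illustrative): composing an AnalyzeTextOptions with the constructor above.
// The sample text and the lowercase filter are assumptions chosen for demonstration.
internal static AnalyzeTextOptions CreateExampleAnalyzeRequest()
{
    var options = new AnalyzeTextOptions("The quick brown fox.", LexicalTokenizerName.Whitespace);
    options.TokenFilters.Add(TokenFilterName.Lowercase);
    return options;
}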
internal static CustomAnalyzer DeserializeCustomAnalyzer(JsonElement element)
{
    LexicalTokenizerName tokenizer = default;
    Optional<IList<TokenFilterName>> tokenFilters = default;
    Optional<IList<string>> charFilters = default;
    string odataType = default;
    string name = default;
    foreach (var property in element.EnumerateObject())
    {
        if (property.NameEquals("tokenizer"))
        {
            tokenizer = new LexicalTokenizerName(property.Value.GetString());
            continue;
        }
        if (property.NameEquals("tokenFilters"))
        {
            if (property.Value.ValueKind == JsonValueKind.Null)
            {
                property.ThrowNonNullablePropertyIsNull();
                continue;
            }
            List<TokenFilterName> array = new List<TokenFilterName>();
            foreach (var item in property.Value.EnumerateArray())
            {
                array.Add(new TokenFilterName(item.GetString()));
            }
            tokenFilters = array;
            continue;
        }
        if (property.NameEquals("charFilters"))
        {
            if (property.Value.ValueKind == JsonValueKind.Null)
            {
                property.ThrowNonNullablePropertyIsNull();
                continue;
            }
            List<string> array = new List<string>();
            foreach (var item in property.Value.EnumerateArray())
            {
                array.Add(item.GetString());
            }
            charFilters = array;
            continue;
        }
        if (property.NameEquals("@odata.type"))
        {
            odataType = property.Value.GetString();
            continue;
        }
        if (property.NameEquals("name"))
        {
            name = property.Value.GetString();
            continue;
        }
    }
    return new CustomAnalyzer(odataType, name, tokenizer, Optional.ToList(tokenFilters), Optional.ToList(charFilters));
}
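// Sketch (illustrative): the JSON shape DeserializeCustomAnalyzer handles, using the property
// names read above. The specific analyzer name and filter values are assumptions, and the
// helper below exists only to show how the deserializer might be driven from parsed JSON.
internal static CustomAnalyzer ParseExampleCustomAnalyzer()
{
    const string json = @"{
        ""@odata.type"": ""#Microsoft.Azure.Search.CustomAnalyzer"",
        ""name"": ""my-analyzer"",
        ""tokenizer"": ""whitespace"",
        ""tokenFilters"": [ ""lowercase"" ],
        ""charFilters"": [ ""html_strip"" ]
    }";
    using JsonDocument document = JsonDocument.Parse(json);
    return DeserializeCustomAnalyzer(document.RootElement);
}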
public CustomAnalyzer(string name, LexicalTokenizerName tokenizer) : base(name)
{
    if (name == null)
    {
        throw new ArgumentNullException(nameof(name));
    }
    Tokenizer = tokenizer;
    ODataType = "#Microsoft.Azure.Search.CustomAnalyzer";
}