/// <summary>
/// Verifies that a CustomAnalyzer constructed with a tokenizer, two token filters,
/// and two character filters exposes each component, in order, under the expected name.
/// </summary>
public void PASS_Create()
{
    // Arrange: build each component group up front with named locals.
    StandardTokenizer tokenizer = new StandardTokenizer("std-tokenizer-name");
    List<ITokenFilter> tokenFilters = new List<ITokenFilter>()
    {
        new StandardTokenFilter("std-token-filter-name"),
        new LengthTokenFilter("length-token-filter-name")
    };
    List<ICharacterFilter> characterFilters = new List<ICharacterFilter>()
    {
        new MappingCharacterFilter("mapping-char-filter-name", "path"),
        new HtmlStripCharacterFilter("html-strip-char-filter-name")
    };

    // Act
    CustomAnalyzer analyzer = new CustomAnalyzer("name", tokenizer, tokenFilters, characterFilters);

    // Assert: analyzer name, tokenizer, then both filter lists preserve type and order.
    Assert.IsNotNull(analyzer);
    Assert.AreEqual("name", analyzer.Name);
    Assert.AreEqual("std-tokenizer-name", analyzer.Tokenizer.Name);
    Assert.IsInstanceOfType(analyzer.TokenFilters.First(), typeof(StandardTokenFilter));
    Assert.AreEqual("std-token-filter-name", analyzer.TokenFilters.First().Name);
    Assert.IsInstanceOfType(analyzer.TokenFilters.Last(), typeof(LengthTokenFilter));
    Assert.AreEqual("length-token-filter-name", analyzer.TokenFilters.Last().Name);
    Assert.IsInstanceOfType(analyzer.CharacterFilters.First(), typeof(MappingCharacterFilter));
    Assert.AreEqual("mapping-char-filter-name", analyzer.CharacterFilters.First().Name);
    Assert.IsInstanceOfType(analyzer.CharacterFilters.Last(), typeof(HtmlStripCharacterFilter));
    Assert.AreEqual("html-strip-char-filter-name", analyzer.CharacterFilters.Last().Name);
}
/// <summary>
/// Verifies that a fully-populated CustomAnalyzer serializes to the expected
/// Elasticsearch-style JSON: a "custom" analyzer keyed by name, with tokenizer,
/// filter, and char_filter referenced by component name only.
/// </summary>
public void PASS_Serialize()
{
    // Arrange: same component setup as the creation test.
    CustomAnalyzer analyzer = new CustomAnalyzer(
        "name",
        new StandardTokenizer("std-tokenizer-name"),
        new List<ITokenFilter>()
        {
            new StandardTokenFilter("std-token-filter-name"),
            new LengthTokenFilter("length-token-filter-name")
        },
        new List<ICharacterFilter>()
        {
            new MappingCharacterFilter("mapping-char-filter-name", "path"),
            new HtmlStripCharacterFilter("html-strip-char-filter-name")
        });

    // Act
    string json = JsonConvert.SerializeObject(analyzer);

    // Assert: exact JSON match — components appear by name, not inlined.
    Assert.IsNotNull(json);
    string expectedJson = "{\"name\":{\"type\":\"custom\",\"tokenizer\":\"std-tokenizer-name\",\"filter\":[\"std-token-filter-name\",\"length-token-filter-name\"],\"char_filter\":[\"mapping-char-filter-name\",\"html-strip-char-filter-name\"]}}";
    Assert.AreEqual(expectedJson, json);
}
/// <summary>
/// Deserializes a custom analyzer from JSON. The analyzer's tokenizer, token filters,
/// and character filters are referenced by name; each name is resolved against the
/// components already registered on <c>_AnalysisSettings</c>, falling back to
/// reflectively constructing a default instance of the matching built-in type.
/// </summary>
/// <param name="reader">Reader positioned at the analyzer object.</param>
/// <param name="objectType">Target type requested by the serializer (unused).</param>
/// <param name="existingValue">Existing value (unused).</param>
/// <param name="serializer">Serializer performing the read.</param>
/// <returns>The populated <see cref="CustomAnalyzer"/>.</returns>
/// <exception cref="Exception">AnalysisSettings has not been supplied.</exception>
/// <exception cref="TokenizerNotDefinedException">A referenced tokenizer cannot be resolved.</exception>
/// <exception cref="TokenFilterNotDefinedException">A referenced token filter cannot be resolved.</exception>
/// <exception cref="CharacterFilterNotDefinedException">A referenced character filter cannot be resolved.</exception>
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    Dictionary<string, object> analysisDict = serializer.Deserialize<Dictionary<string, object>>(reader);

    if (_AnalysisSettings == null)
    {
        throw new Exception("AnalysisSettings must be present to deserialize a custom analyzer.");
    }

    // The analyzer object may be nested under an "analyzer" key or be the root itself.
    Dictionary<string, object> analyzerDict;
    if (analysisDict.ContainsKey(_ANALYZER))
    {
        analyzerDict = JsonConvert.DeserializeObject<Dictionary<string, object>>(analysisDict.GetString(_ANALYZER));
    }
    else
    {
        analyzerDict = analysisDict;
    }

    // Single-entry dictionary: key is the analyzer name, value is its field object.
    Dictionary<string, object> fieldDict = JsonConvert.DeserializeObject<Dictionary<string, object>>(analyzerDict.First().Value.ToString());

    ITokenizer tokenizer = ResolveTokenizer(fieldDict.GetString(_TOKENIZER));

    CustomAnalyzer analyzer = new CustomAnalyzer(analyzerDict.First().Key, tokenizer);
    AnalyzerBase.Deserialize(analyzer, fieldDict);

    if (fieldDict.ContainsKey(_TOKEN_FILTERS))
    {
        List<ITokenFilter> filters = new List<ITokenFilter>();
        foreach (string tokenFilterName in JsonConvert.DeserializeObject<IEnumerable<string>>(fieldDict.GetString(_TOKEN_FILTERS)))
        {
            filters.Add(ResolveTokenFilter(tokenFilterName));
        }
        if (filters.Any())
        {
            analyzer.TokenFilters = filters;
        }
    }

    if (fieldDict.ContainsKey(_CHARACTER_FILTERS))
    {
        List<ICharacterFilter> charFilters = new List<ICharacterFilter>();
        foreach (string charFilterName in JsonConvert.DeserializeObject<IEnumerable<string>>(fieldDict.GetString(_CHARACTER_FILTERS)))
        {
            charFilters.Add(ResolveCharacterFilter(charFilterName));
        }
        if (charFilters.Any())
        {
            analyzer.CharacterFilters = charFilters;
        }
    }

    return analyzer;
}

/// <summary>Resolves a tokenizer by registered name, else constructs the matching built-in type.</summary>
private ITokenizer ResolveTokenizer(string tokenizerName)
{
    ITokenizer tokenizer = _AnalysisSettings.Tokenizers.FirstOrDefault(x => x.Name.Equals(tokenizerName));
    if (tokenizer != null)
    {
        return tokenizer;
    }

    TokenizerTypeEnum tokenizerType = TokenizerTypeEnum.Find(tokenizerName);
    try
    {
        return Activator.CreateInstance(tokenizerType.ImplementationType, new object[] { tokenizerName }) as ITokenizer;
    }
    catch
    {
        // Deliberately translate any construction failure into the domain exception.
        throw new TokenizerNotDefinedException(tokenizerName);
    }
}

/// <summary>Resolves a token filter by registered name, else constructs the matching built-in type.</summary>
private ITokenFilter ResolveTokenFilter(string tokenFilterName)
{
    ITokenFilter tokenFilter = _AnalysisSettings.TokenFilters.FirstOrDefault(x => x.Name.Equals(tokenFilterName));
    if (tokenFilter != null)
    {
        return tokenFilter;
    }

    TokenFilterTypeEnum filterType = TokenFilterTypeEnum.Find(tokenFilterName);
    try
    {
        return Activator.CreateInstance(filterType.ImplementationType, new object[] { tokenFilterName }) as ITokenFilter;
    }
    catch
    {
        // Deliberately translate any construction failure into the domain exception.
        throw new TokenFilterNotDefinedException(tokenFilterName);
    }
}

/// <summary>Resolves a character filter by registered name, else constructs the matching built-in type.</summary>
private ICharacterFilter ResolveCharacterFilter(string charFilterName)
{
    ICharacterFilter charFilter = _AnalysisSettings.CharacterFilters.FirstOrDefault(x => x.Name.Equals(charFilterName));
    if (charFilter != null)
    {
        return charFilter;
    }

    CharacterFilterTypeEnum filterType = CharacterFilterTypeEnum.Find(charFilterName);
    try
    {
        return Activator.CreateInstance(filterType.ImplementationType, new object[] { charFilterName }) as ICharacterFilter;
    }
    catch
    {
        // Deliberately translate any construction failure into the domain exception.
        throw new CharacterFilterNotDefinedException(charFilterName);
    }
}