// Verifies the (name, wordListPath) constructor wires both values through to the properties.
public void PASS_Create()
        {
            const string expectedName = "name";
            const string expectedPath = "path";

            var filter = new DictionaryDecompounderTokenFilter(expectedName, expectedPath);

            Assert.IsNotNull(filter);
            Assert.AreEqual(expectedName, filter.Name);
            Assert.AreEqual(expectedPath, filter.WordListPath);
        }
        /// <summary>
        /// Deserializes a compound word token filter from its JSON form
        /// <c>{"&lt;filter name&gt;":{"type":...,"word_list"/"word_list_path":...,...}}</c>,
        /// producing either a dictionary_decompounder or a hyphenation_decompounder filter.
        /// </summary>
        /// <param name="reader">Reader positioned at the outer filter object.</param>
        /// <param name="objectType">Requested target type (unused; the type is taken from the JSON "type" field).</param>
        /// <param name="existingValue">Existing value (unused).</param>
        /// <param name="serializer">Serializer used to read the outer dictionary.</param>
        /// <returns>The populated compound word token filter.</returns>
        /// <exception cref="RequiredPropertyMissingException">Neither word_list nor word_list_path is present.</exception>
        /// <exception cref="Exception">The "type" field is missing/unknown or is not a compound word filter type.</exception>
        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
        {
            Dictionary<string, object> filterDict = serializer.Deserialize<Dictionary<string, object>>(reader);

            // The outer object has a single entry: key = filter name, value = the field object.
            // Capture it once instead of enumerating First() twice.
            KeyValuePair<string, object> entry = filterDict.First();
            string filterName = entry.Key;
            Dictionary<string, object> fieldDict = JsonConvert.DeserializeObject<Dictionary<string, object>>(entry.Value.ToString());

            // Exactly one of word_list / word_list_path must be supplied.
            string wordListPath = null;
            IEnumerable<string> wordList = null;
            if (fieldDict.ContainsKey(_WORD_LIST))
            {
                wordList = JsonConvert.DeserializeObject<IEnumerable<string>>(fieldDict.GetString(_WORD_LIST));
            }
            else if (fieldDict.ContainsKey(_WORD_LIST_PATH))
            {
                wordListPath = fieldDict.GetStringOrDefault(_WORD_LIST_PATH);
            }
            else
            {
                throw new RequiredPropertyMissingException(_WORD_LIST + "/" + _WORD_LIST_PATH);
            }

            // Resolve the concrete filter type from the JSON "type" field.
            string tokenFilterStr = fieldDict.GetString(_TYPE);
            TokenFilterTypeEnum tokenFilter = TokenFilterTypeEnum.Find(tokenFilterStr);
            if (tokenFilter == null)
            {
                throw new Exception(tokenFilterStr + " is not a valid token filter.");
            }

            // Every branch below either assigns filter or throws, so no null default is needed.
            CompoundWordTokenFilter filter;
            if (tokenFilter == TokenFilterTypeEnum.DictionaryDecompounder)
            {
                if (wordList != null)
                    filter = new DictionaryDecompounderTokenFilter(filterName, wordList);
                else
                    filter = new DictionaryDecompounderTokenFilter(filterName, wordListPath);
            }
            else if (tokenFilter == TokenFilterTypeEnum.HyphenationDecompounder)
            {
                if (wordList != null)
                    filter = new HyphenationDecompounderTokenFilter(filterName, wordList);
                else
                    filter = new HyphenationDecompounderTokenFilter(filterName, wordListPath);
            }
            else
            {
                throw new Exception(tokenFilterStr + " is not a valid compound word token filter.");
            }

            // Populate the base token filter fields, then the compound-word-specific ones
            // (falling back to the documented defaults when a field is absent).
            TokenFilterBase.Deserialize(filter, fieldDict);

            filter.MaximumSubWordSize = fieldDict.GetInt32(_MAXIMUM_SUBWORD_SIZE, CompoundWordTokenFilter._MAXIMUM_SUBWORD_SIZE_DEFAULT);
            filter.MinimumSubWordSize = fieldDict.GetInt32(_MINIMUM_SUBWORD_SIZE, CompoundWordTokenFilter._MINIMUM_SUBWORD_SIZE_DEFAULT);
            filter.MinimumWordSize = fieldDict.GetInt32(_MINIMUM_WORD_SIZE, CompoundWordTokenFilter._MINIMUM_WORD_SIZE_DEFAULT);
            filter.OnlyLongestMatch = fieldDict.GetBool(_ONLY_LONGEST_MATCH, CompoundWordTokenFilter._ONLY_LONGEST_MATCH_DEFAULT);

            return filter;
        }
        /// <summary>
        /// Verifies a path-based dictionary decompounder serializes to the expected JSON shape.
        /// </summary>
        public void PASS_Serialize()
        {
            var filter = new DictionaryDecompounderTokenFilter("name", "path");

            string actualJson = JsonConvert.SerializeObject(filter);

            Assert.IsNotNull(actualJson);
            Assert.AreEqual(
                "{\"name\":{\"type\":\"dictionary_decompounder\",\"word_list_path\":\"path\"}}",
                actualJson);
        }