        // Code example #1 (NOTE: the original marker "コード例 #1" / "0" appears to be a garbled [Test] attribute or extraction artifact — verify against the original file)
        /// <summary>
        /// Verifies that an analysis section holding an asciifolding token filter,
        /// plus a custom "default" analyzer that references it, serializes to the
        /// expected JSON fragment.
        /// </summary>
        public void SerializeAnalyzerWithAsciifoldingFilter()
        {
            const string targetJson =
                "\"analysis\":{\"filter\":{\"my_ascii_folding\":{\"type\":\"asciifolding\",\"preserve_original\":true}},\"analyzer\":{\"default\":{\"type\":\"custom\",\"tokenizer\":\"standard\",\"filter\":[\"standard\",\"my_ascii_folding\"]}}}";

            // Custom analyzer: standard tokenizer followed by the standard and
            // ascii-folding token filters.
            var defaultAnalyzer = new CustomAnalyzer("default")
            {
                Tokenizer = DefaultTokenizers.Standard,
                Filter = new List<string> {"standard", "my_ascii_folding"}
            };

            var analysis = new Analysis();
            analysis.Analyzer.Analyzers = new List<AnalyzerBase> {defaultAnalyzer};
            analysis.Filters.CustomFilters = new List<AnalysisFilterBase>
            {
                new AsciifoldingTokenFilter("my_ascii_folding") {PreserveOriginal = true}
            };

            var writer = new ElasticsearchCrudJsonWriter();
            analysis.WriteJson(writer);
            var json = writer.GetJsonString();
            Console.WriteLine(json);
            Assert.AreEqual(targetJson, json);
        }
        // Code example #2 (NOTE: original marker "コード例 #2" / "0" looks like an extraction artifact — verify against the original file)
        /// <summary>
        /// Verifies that a custom edgeNGram tokenizer (min/max gram, token char
        /// classes) and an analyzer referencing it by name serialize to the
        /// expected JSON fragment.
        /// </summary>
        public void SerializeAnalyzerWithEdgeNGramTokenizer()
        {
            const string targetJson =
                "\"analysis\":{\"tokenizer\":{\"my_edgengram_tokenizer\":{\"type\":\"edgeNGram\",\"min_gram\":2,\"max_gram\":4,\"token_chars\":[\"digit\",\"letter\"]}},\"analyzer\":{\"default\":{\"type\":\"custom\",\"tokenizer\":\"my_edgengram_tokenizer\"}}}";

            // Tokenizer definition: 2..4 character edge n-grams over digits and letters.
            var edgeNGramTokenizer = new EdgeNGramTokenizer("my_edgengram_tokenizer")
            {
                MaxGram = 4,
                MinGram = 2,
                TokenChars = new List<TokenChar> {TokenChar.digit, TokenChar.letter}
            };

            // The default analyzer points at the custom tokenizer by name.
            var defaultAnalyzer = new CustomAnalyzer("default")
            {
                Tokenizer = "my_edgengram_tokenizer"
            };

            var analysis = new Analysis();
            analysis.Analyzer.Analyzers = new List<AnalyzerBase> {defaultAnalyzer};
            analysis.Tokenizers.CustomTokenizers = new List<AnalysisTokenizerBase> {edgeNGramTokenizer};

            var writer = new ElasticsearchCrudJsonWriter();
            analysis.WriteJson(writer);
            var json = writer.GetJsonString();
            Console.WriteLine(json);
            Assert.AreEqual(targetJson, json);
        }
        // Code example #3 (NOTE: original marker "コード例 #3" / "0" looks like an extraction artifact — verify against the original file)
        /// <summary>
        /// Verifies that a standard tokenizer with a raised max_token_length and
        /// an analyzer referencing it by name serialize to the expected JSON
        /// fragment.
        /// </summary>
        public void SerializeAnalyzerWithStandardTokenizer()
        {
            const string targetJson =
                "\"analysis\":{\"tokenizer\":{\"bigger_limit_token_count\":{\"type\":\"standard\",\"max_token_length\":360}},\"analyzer\":{\"default\":{\"type\":\"custom\",\"tokenizer\":\"bigger_limit_token_count\"}}}";

            // Standard tokenizer with a non-default token length limit.
            var standardTokenizer = new StandardTokenizer("bigger_limit_token_count") {MaxTokenLength = 360};

            var analysis = new Analysis();
            analysis.Analyzer.Analyzers = new List<AnalyzerBase>
            {
                new CustomAnalyzer("default")
                {
                    Tokenizer = "bigger_limit_token_count"
                }
            };
            analysis.Tokenizers.CustomTokenizers = new List<AnalysisTokenizerBase> {standardTokenizer};

            var writer = new ElasticsearchCrudJsonWriter();
            analysis.WriteJson(writer);
            var json = writer.GetJsonString();
            Console.WriteLine(json);
            Assert.AreEqual(targetJson, json);
        }
        // Code example #4 (NOTE: original marker "コード例 #4" / "0" looks like an extraction artifact — verify against the original file)
        /// <summary>
        /// Verifies that a custom analyzer built from the whitespace tokenizer and
        /// a filter chain (lowercase, blocks_filter, shingle) serializes to the
        /// expected JSON fragment. No filter/tokenizer definitions are included here,
        /// only the analyzer itself.
        /// </summary>
        public void SerializeBlocksAnalyzer()
        {
            const string targetJson = "\"analysis\":{\"analyzer\":{\"blocks_analyzer\":{\"type\":\"custom\",\"tokenizer\":\"whitespace\",\"filter\":[\"lowercase\",\"blocks_filter\",\"shingle\"]}}}";

            var blocksAnalyzer = new CustomAnalyzer("blocks_analyzer")
            {
                Tokenizer = DefaultTokenizers.Whitespace,
                Filter = new List<string> {"lowercase", "blocks_filter", "shingle"}
            };

            var analysis = new Analysis();
            analysis.Analyzer.Analyzers = new List<AnalyzerBase> {blocksAnalyzer};

            var writer = new ElasticsearchCrudJsonWriter();
            analysis.WriteJson(writer);
            var json = writer.GetJsonString();
            Console.WriteLine(json);
            Assert.AreEqual(targetJson, json);
        }
        // Code example #5 (NOTE: original marker "コード例 #5" / "0" looks like an extraction artifact — verify against the original file)
 /// <summary>
 /// Initializes the settings with empty <c>Analysis</c> and <c>Similarities</c>
 /// sections so callers can populate them via property initializers.
 /// </summary>
 public IndexUpdateSettings()
 {
     // The two assignments are independent; order is not significant.
     Similarities = new Similarities();
     Analysis = new Analysis();
 }