public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
{
    TokenStream result = new StandardTokenizer(reader);
    result = new TestFilter(enclosingInstance, result);
    result = new LowerCaseFilter(result);
    return result;
}
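The overrides in these examples use the legacy Analyzer.TokenStream(fieldName, reader) extension point from Lucene.Net 2.x/3.x. On Lucene.Net 4.8 an analyzer chain is built by overriding CreateComponents instead. Below is a minimal sketch of the equivalent StandardTokenizer + LowerCaseFilter chain, assuming Lucene.Net 4.8.0-beta and leaving out the test-specific TestFilter; the class name LowercaseStandardAnalyzer is made up for illustration.

using System.IO;
using Lucene.Net.Analysis;
using Lucene.Net.Analysis.Core;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Util;

// Sketch: a lowercasing analyzer built on StandardTokenizer, targeting Lucene.Net 4.8.
public sealed class LowercaseStandardAnalyzer : Analyzer
{
    protected override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
    {
        // The tokenizer produces the raw terms; the filter chain post-processes them.
        var source = new StandardTokenizer(LuceneVersion.LUCENE_48, reader);
        TokenStream result = new LowerCaseFilter(LuceneVersion.LUCENE_48, source);
        return new TokenStreamComponents(source, result);
    }
}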
Example #2
public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
{
    TokenStream result = new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader);
    result = new TestPosIncrementFilter(enclosingInstance, result);
    result = new LowerCaseFilter(result);
    return result;
}
Example #3
public void JieBaLuceneTokenizerAdapterTest()
{
    // Chinese test sentence: "SQLiteFtsTokenizer only gets hold of the string to segment when the PrepareToStart method is called."
    string text = "SQLiteFtsTokenizer 只有在 PrepareToStart 方法时才能取得要切分的字符串。";

    // The tokenizer is constructed over a placeholder reader; the adapter supplies the actual text in TestMe.
    Tokenizer tokenizer = new Lucene.Net.Analysis.Standard.StandardTokenizer(Lucene.Net.Util.LuceneVersion.LUCENE_48, new StringReader(" "));
    LuceneTokenizerAdapter adapter = new LuceneTokenizerAdapter(tokenizer);
    var r = adapter.TestMe(text);

    Assert.Pass();
}
Example #4
public static string[] GetTokens(string sentence)
{
    // Legacy Lucene.Net 2.x enumeration: Token.Next()/TermText() were removed in later versions.
    List<string> tokens = new List<string>();
    TextReader tr = new StringReader(sentence);
    Lucene.Net.Analysis.Standard.StandardTokenizer tokenizer = new Lucene.Net.Analysis.Standard.StandardTokenizer(tr);
    Token token = tokenizer.Next();
    while (token != null) {
        tokens.Add(token.TermText());
        token = tokenizer.Next();
    }
    tokenizer.Close();
    tr.Close();
    return tokens.ToArray();
}
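GetTokens above relies on the legacy Token.Next()/TermText() loop, which no longer exists in current Lucene.Net. Below is a rough sketch of the same enumeration against the attribute-based API, assuming Lucene.Net 4.8.0-beta; the method name GetTokens48 is made up for illustration.

using System.Collections.Generic;
using System.IO;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Util;

public static string[] GetTokens48(string sentence)
{
    var tokens = new List<string>();
    using (var tokenizer = new StandardTokenizer(LuceneVersion.LUCENE_48, new StringReader(sentence)))
    {
        // Terms are read through ICharTermAttribute instead of Token objects.
        var termAttr = tokenizer.AddAttribute<ICharTermAttribute>();
        tokenizer.Reset();
        while (tokenizer.IncrementToken())
        {
            tokens.Add(termAttr.ToString());
        }
        tokenizer.End();
    }
    return tokens.ToArray();
}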
Example #5
public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
{
    TokenStream result = new StandardTokenizer(reader);
    result = new TestPosIncrementFilter(enclosingInstance, result);
    result = new LowerCaseFilter(result);
    return result;
}
Example #6
 public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 {
     TokenStream result = new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader);
     result = new TestFilter(enclosingInstance, result);
     result = new LowerCaseFilter(result);
     return result;
 }
Example #7
public static IEnumerable<string> InnerGetLuceneTokens(string text)
{
    // Legacy Lucene.Net 2.x pipeline: StandardTokenizer -> LowerCaseFilter -> PorterStemFilter.
    TextReader tr = new StringReader(text);
    TokenStream tok = new Lucene.Net.Analysis.Standard.StandardTokenizer(tr);
    tok = new LowerCaseFilter(tok);
    tok = new PorterStemFilter(tok);
    Token t = tok.Next();
    while (t != null) {
        yield return t.TermText();
        t = tok.Next();
    }
    // Because this is an iterator method, the cleanup below only runs if the sequence is enumerated to completion.
    tok.Close();
    tr.Close();
}
Example #8
				public AnonymousClassTokenFilter(AnonymousClassAnalyzer enclosingInstance, StandardTokenizer standardTokenizer) : base(standardTokenizer)
				{
					InitBlock(enclosingInstance);
				}