Example #1
        public void TestOtherLetterOffset()
        {
            // "a天b": the CJK character breaks the Latin letter run, so the tokenizer
            // emits three single-character tokens: "a" [0,1), "天" [1,2), "b" [2,3).
            string s = "a天b";
            ChineseTokenizer tokenizer = new ChineseTokenizer(new StringReader(s));

            int correctStartOffset = 0;
            int correctEndOffset = 1;
            IOffsetAttribute offsetAtt = tokenizer.GetAttribute<IOffsetAttribute>();
            while (tokenizer.IncrementToken())
            {
                // Every token is exactly one character wide, so both offsets advance by one per token.
                Assert.AreEqual(correctStartOffset, offsetAtt.StartOffset);
                Assert.AreEqual(correctEndOffset, offsetAtt.EndOffset);
                correctStartOffset++;
                correctEndOffset++;
            }
        }
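For context, the listings show only bare test methods. The sketch below is a hypothetical fixture wrapping Example #1, not taken from any of the projects above: the namespace and class name are illustrative, and the usings assume NUnit plus the Lucene.Net.Analysis.Common package from the 4.8 line, where ChineseTokenizer sits in Lucene.Net.Analysis.Cn and IOffsetAttribute in Lucene.Net.Analysis.TokenAttributes.

        using System.IO;
        using Lucene.Net.Analysis.Cn;               // ChineseTokenizer
        using Lucene.Net.Analysis.TokenAttributes;  // IOffsetAttribute
        using NUnit.Framework;

        namespace ChineseTokenizerExamples          // illustrative namespace
        {
            [TestFixture]
            public class ChineseTokenizerOffsetTests // illustrative class name
            {
                [Test]
                public void TestOtherLetterOffset()
                {
                    // Method body as in Example #1 above.
                }
            }
        }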
Example #2
        public virtual void TestOtherLetterOffset()
        {
            string s = "a天b";
            ChineseTokenizer tokenizer = new ChineseTokenizer(new StringReader(s));

            int correctStartOffset = 0;
            int correctEndOffset = 1;
            IOffsetAttribute offsetAtt = tokenizer.GetAttribute<IOffsetAttribute>();
            tokenizer.Reset();   // the stream must be reset before the first IncrementToken()
            while (tokenizer.IncrementToken())
            {
                // NOTE: this variant calls StartOffset()/EndOffset() as methods; in the
                // released Lucene.NET packages they are properties (compare Example #3 below).
                assertEquals(correctStartOffset, offsetAtt.StartOffset());
                assertEquals(correctEndOffset, offsetAtt.EndOffset());
                correctStartOffset++;
                correctEndOffset++;
            }
            tokenizer.End();     // finalize end-of-stream state
            tokenizer.Dispose(); // release the underlying reader
        }
Example #3
        public virtual void TestOtherLetterOffset()
        {
            string s = "a天b";
            ChineseTokenizer tokenizer = new ChineseTokenizer(new StringReader(s));

            int correctStartOffset = 0;
            int correctEndOffset = 1;
            IOffsetAttribute offsetAtt = tokenizer.GetAttribute<IOffsetAttribute>();

            tokenizer.Reset();
            while (tokenizer.IncrementToken())
            {
                assertEquals(correctStartOffset, offsetAtt.StartOffset);
                assertEquals(correctEndOffset, offsetAtt.EndOffset);
                correctStartOffset++;
                correctEndOffset++;
            }
            tokenizer.End();
            tokenizer.Dispose();
        }
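Outside a test fixture, the same offset behaviour can be observed directly. Below is a minimal standalone sketch, not taken from any of the projects above, assuming the Lucene.Net.Analysis.Common package from the 4.8 line (where StartOffset/EndOffset are properties); the class name ChineseTokenizerOffsetDemo is illustrative. It recovers each token by slicing the input string with the reported offsets, which is exactly the invariant the incrementing correctStartOffset/correctEndOffset counters assert in the tests.

        using System;
        using System.IO;
        using Lucene.Net.Analysis.Cn;               // ChineseTokenizer
        using Lucene.Net.Analysis.TokenAttributes;  // IOffsetAttribute

        public static class ChineseTokenizerOffsetDemo  // illustrative class name
        {
            public static void Main()
            {
                string s = "a天b";
                ChineseTokenizer tokenizer = new ChineseTokenizer(new StringReader(s));

                // The attribute must be obtained before consuming the stream.
                IOffsetAttribute offsetAtt = tokenizer.GetAttribute<IOffsetAttribute>();

                tokenizer.Reset();   // required before the first IncrementToken()
                while (tokenizer.IncrementToken())
                {
                    // Expected output: a [0,1)  天 [1,2)  b [2,3)
                    string token = s.Substring(offsetAtt.StartOffset,
                                               offsetAtt.EndOffset - offsetAtt.StartOffset);
                    Console.WriteLine($"{token} [{offsetAtt.StartOffset},{offsetAtt.EndOffset})");
                }
                tokenizer.End();     // finalize end-of-stream state
                tokenizer.Dispose(); // release the underlying reader
            }
        }

This should print a [0,1), 天 [1,2), b [2,3): the CJK character in the middle breaks the Latin letter run, so every character becomes its own one-character token. Note that ChineseTokenizer is marked obsolete in the 4.8 packages in favor of StandardTokenizer.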