Example #1
0
        /// <summary>
        /// Post-processes the raw lexer output: identifier tokens are split into
        /// smaller tokens by the procedure-level segmenter, while every other
        /// token is passed through unchanged.
        /// </summary>
        /// <param name="rawTokens">Token stream produced by the lexer.</param>
        /// <returns>A new list containing the segmented token stream.</returns>
        private List<LexToken> Seg(List<LexToken> rawTokens)
        {
            // Segmenter is configured from the enclosing procedure's context.
            TokenSegmenter segmenter = new TokenSegmenter(this.ExpContext.ProcContext.ProcSegmenter);
            List<LexToken> tokens = new List<LexToken>();

            foreach (var tok in rawTokens)
            {
                if (tok.IsKind(TokenKindKeyword.Ident))
                {
                    // An identifier may contain several logical words; expand it
                    // into the token(s) the segmenter produces.
                    tokens.AddRange(segmenter.Split(tok));
                }
                else
                {
                    tokens.Add(tok);
                }
            }

            return tokens;
        }
Example #2
0
        /// <summary>
        /// Post-processes the raw lexer output: identifier tokens are split into
        /// smaller tokens by this context's segmenter, while every other token is
        /// copied through unchanged.
        /// </summary>
        /// <param name="RawTokens">Token stream produced by the lexer.</param>
        /// <returns>A new list containing the segmented token stream.</returns>
        public List<LexToken> SegToken(List<LexToken> RawTokens)
        {
            // Segmenter is configured from the current processing context.
            TokenSegmenter segmenter = new TokenSegmenter(this.context.ProcSegmenter);
            List<LexToken> tokens = new List<LexToken>();

            foreach (var tok in RawTokens)
            {
                if (tok.IsKind(TokenKindKeyword.Ident))
                {
                    // An identifier may contain several logical words; expand it
                    // into the token(s) the segmenter produces.
                    tokens.AddRange(segmenter.Split(tok));
                }
                else
                {
                    tokens.Add(tok);
                }
            }

            return tokens;
        }