/// <summary>
/// Initializes a new, empty lexicon: no tokens defined yet, and a single
/// default lexer state (index 0) registered as the first entry of the
/// state list.
/// </summary>
public Lexicon()
{
    m_tokenList = new List<TokenInfo>();

    // The default state always occupies index 0 of the state list.
    m_defaultState = new Lexer(this, 0);
    m_lexerStates = new List<Lexer> { m_defaultState };
}
/// <summary>
/// Creates a new lexer state derived from <paramref name="baseLexer"/>,
/// appends it to this lexicon's state list, and returns it.
/// </summary>
/// <param name="baseLexer">The parent state the new state inherits from.</param>
/// <returns>The newly registered lexer state.</returns>
internal Lexer DefineLexer(Lexer baseLexer)
{
    // The new state's index is its position in the state list,
    // which is the current count before it is appended.
    int stateIndex = m_lexerStates.Count;

    Lexer derivedState = new Lexer(this, stateIndex, baseLexer);
    m_lexerStates.Add(derivedState);

    return derivedState;
}
/// <summary>
/// Initializes a token descriptor by capturing its defining regular
/// expression, the owning lexicon, the lexer state it belongs to,
/// and its identifying tag.
/// </summary>
/// <param name="definition">Regular expression that matches this token.</param>
/// <param name="lexicon">The lexicon this token is defined in.</param>
/// <param name="state">The lexer state the token is active in.</param>
/// <param name="tag">The tag identifying this token.</param>
internal TokenInfo(RegularExpression definition, Lexicon lexicon, Lexer state, Token tag)
{
    // Straight pass-through: store each argument in its matching property.
    Definition = definition;
    Lexicon = lexicon;
    State = state;
    Tag = tag;
}
/// <summary>
/// Defines a new token in this lexicon for the given lexer state and
/// appends it to the master token list.
/// </summary>
/// <param name="definition">Regular expression that matches the token.</param>
/// <param name="state">The lexer state the token belongs to.</param>
/// <param name="indexInState">
/// NOTE(review): this parameter is not read anywhere in this method —
/// presumably callers pass the token's ordinal within the state; confirm
/// whether it should feed into the <see cref="Token"/> tag or be removed.
/// </param>
/// <param name="description">
/// Human-readable name for the token; when null, the textual form of
/// <paramref name="definition"/> is used instead.
/// </param>
/// <returns>The newly created token descriptor.</returns>
internal TokenInfo AddToken(RegularExpression definition, Lexer state, int indexInState, string description)
{
    // Global token index = its position in the master list before appending.
    int globalIndex = m_tokenList.Count;

    // Fall back to the regex's string form when no description is supplied.
    string displayName = description ?? definition.ToString();

    Token tag = new Token(globalIndex, displayName, state.Index);
    TokenInfo tokenInfo = new TokenInfo(definition, this, state, tag);
    m_tokenList.Add(tokenInfo);

    return tokenInfo;
}
/// <summary>
/// Initializes a lexer state at the given index within
/// <paramref name="lexicon"/>. A state with no base lexer is a root
/// (level 0); otherwise its level is one below its base, and it
/// registers itself as a child of that base state.
/// </summary>
/// <param name="lexicon">The lexicon that owns this state.</param>
/// <param name="index">This state's index in the lexicon's state list.</param>
/// <param name="baseLexer">Parent state, or null for the root state.</param>
internal Lexer(Lexicon lexicon, int index, Lexer baseLexer)
{
    Lexicon = lexicon;
    Index = index;
    BaseLexer = baseLexer;
    Children = new List<Lexer>();
    m_tokens = new List<TokenInfo>();

    if (baseLexer != null)
    {
        // Derived state: one level deeper than its base, and the base
        // tracks it as a child.
        Level = baseLexer.Level + 1;
        baseLexer.Children.Add(this);
    }
    else
    {
        // Root state.
        Level = 0;
    }
}