public List<Token> Scan()
{
    // Scan() builds its own tokenizer over the raw input, independently of the
    // instance created in the constructor, so each call rescans from the start.
    DFATokenizer tokenizer = new DFATokenizer(inputText);
    tokenizer.Build();

    var tokensSequence = new List<TokenType>();
    var curToken = tokenizer.GetNextToken();

    // Consume tokens until end of file, stopping at the first lexical error.
    while (curToken.TokenType != TokenType.EndOfFile)
    {
        if (curToken.TokenType == TokenType.Error)
        {
            ErrorsList.Add((TokenError)curToken);
            break;
        }

        // Whitespace is discarded; every other token is kept and its type
        // mirrored in tokensSequence.
        if (curToken.TokenType != TokenType.WhiteSpace)
        {
            TokensList.Add(curToken);
            tokensSequence.Add(curToken.TokenType);
        }

        curToken = tokenizer.GetNextToken();
    }

    // Derive the symbol table and comment list from the collected tokens.
    FillSymbolTable();
    FillComentsList();

    return TokensList;
}
public Scanner(string inputText)
{
    // Keep the raw source so Scan() can build its tokenizer over it
    // (assumes the class declares a matching inputText field).
    this.inputText = inputText;

    tokenizer = new DFATokenizer(inputText);
    tokenizer.Build();

    TokensList = new List<Token>();
    SymbolTable = new HashSet<SymbolData>();
    ErrorsList = new List<TokenError>();
    CommentsList = new List<string>();
}
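
// A minimal usage sketch in a separate driver file (illustration only, not part of
// the original class). It assumes Scanner exposes TokensList, ErrorsList, and
// CommentsList as publicly readable members and that Token/TokenError have a
// meaningful ToString(); the ScannerDemo class and the sample source string are
// hypothetical.
using System;
using System.Collections.Generic;

public static class ScannerDemo
{
    public static void Main()
    {
        string source = "x = 42 + y; // trailing comment"; // hypothetical sample input

        var scanner = new Scanner(source);
        List<Token> tokens = scanner.Scan(); // whitespace tokens are filtered out

        // Scan() stops at the first lexical error, so report any it recorded.
        foreach (TokenError error in scanner.ErrorsList)
            Console.WriteLine($"Lexical error: {error}");

        // Dump the recognized tokens and the comments gathered by FillComentsList().
        foreach (Token token in tokens)
            Console.WriteLine($"{token.TokenType}: {token}");

        foreach (string comment in scanner.CommentsList)
            Console.WriteLine($"Comment: {comment}");
    }
}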