/// <summary>
/// Creates a lexer reading from <paramref name="tokenizer"/> and primes the
/// one-token lookahead so <c>NextToken()</c> can return immediately.
/// </summary>
/// <param name="tokenizer">Source of raw input for lexing.</param>
public Lexer(Tokenizer tokenizer)
{
    _tokenizer = tokenizer;
    // Scratch buffers reused across LexToken calls; capacities (64 / 3) are
    // presumably tuned for typical token and escape-sequence lengths — not
    // verifiable from this chunk.
    _buffer = new StringBuilder(64);
    _escapeBuffer = new StringBuilder(3);
    // Fill the lookahead slot up front.
    _next = LexToken();
}
/// <summary>
/// Formats an array of tokens as a comma-separated list of quoted token
/// strings, e.g. <c>'a', 'b', 'c'</c>.
/// </summary>
/// <param name="tokens">Tokens to format; must not be null.</param>
/// <returns>The joined list, or the empty string for an empty array.</returns>
public static string StringOf(Token[] tokens)
{
    // Fix: the original unconditionally called ToString(0, Length - 2),
    // which throws ArgumentOutOfRangeException for an empty array.
    if (tokens.Length == 0)
    {
        return string.Empty;
    }
    var builder = new StringBuilder(4 * tokens.Length);
    foreach (var token in tokens)
    {
        builder.Append('\'');
        builder.Append(StringOf(token));
        builder.Append("', ");
    }
    // Drop the trailing ", " separator.
    return builder.ToString(0, builder.Length - 2);
}
/// <summary>
/// Advances the lexer by one token: the buffered lookahead becomes the
/// current token and a fresh lookahead is lexed.
/// </summary>
/// <returns>The token that just became current.</returns>
public Token NextToken()
{
    // Promote the lookahead before lexing, in case LexToken consults
    // _current; then refill the lookahead slot.
    _current = _next;
    _next = LexToken();
    return _current;
}
/// <summary>
/// Pulls the next token from the scanner into <c>_current</c> and maintains
/// the parenthesis-nesting counter.
/// </summary>
private void Consume()
{
    _current = _scanner.NextToken();
    if (_current == null)
    {
        // End of input: nothing to track.
        return;
    }
    switch (_current.Type)
    {
        case TokenType.LeftParenthesis:
            _brackets++;
            break;
        case TokenType.RightParenthesis:
            _brackets--;
            break;
    }
}
/// <summary>
/// Produces a human-readable description of a token for diagnostics.
/// </summary>
/// <param name="token">Token to describe; null means end of input.</param>
/// <returns>
/// "end of input" for null; the token's text for literals and identifiers;
/// otherwise the description of its token type.
/// </returns>
public static string StringOf(Token token)
{
    if (token == null)
    {
        return "end of input";
    }
    // Literals and identifiers carry meaningful text; everything else is
    // better described by its type.
    var hasOwnText = token.Type == TokenType.Literal
        || token.Type == TokenType.Identifier;
    return hasOwnText ? token.Text : StringOf(token.Type);
}