/// <summary>
/// Consumes the next token, which must be of the given <paramref name="tokenType"/>.
/// </summary>
/// <param name="tokenType">The token type the next token is required to have.</param>
/// <param name="facultative">
/// When true (default), a mismatch raises a fictive <c>FailedEatToken</c> error so the
/// caller can backtrack; when false it raises a hard <c>ExpectedTokenException</c>.
/// </param>
/// <returns>The token that was consumed.</returns>
/// <exception cref="ParserError">On type mismatch or when no tokens remain.</exception>
public Token Eat(TokenInfo.TokenType tokenType, bool facultative = true)
{
    // Mismatch: fictive error when facultative (backtrackable), hard error otherwise.
    if (LookAhead().Type != tokenType)
    {
        throw facultative
            ? new ParserError(new FailedEatToken(tokenType), Cursor)
            : new ParserError(new ExpectedTokenException(tokenType), Cursor);
    }

    // NOTE(review): only reachable after LookAhead() already succeeded — presumably
    // a defensive guard; confirm whether this should test LookAheadIndex instead.
    if (Tokens.Length == 0)
    {
        throw new ParserError(new NoTokenLeft(), Cursor);
    }

    // Advance the cursor: an EOL token moves it to column 1 of the next line,
    // anything else moves it onto the consumed token's own position.
    Cursor = LookAhead().Type != TokenInfo.TokenType.EOL
        ? LookAhead().Pos
        : new CodePosition(LookAhead().Pos.Line + 1, 1);

    LookAheadIndex++;
    return Tokens[LookAheadIndex - 1];
}
/// <summary>
/// Splits <paramref name="code"/> into tokens and stores them in <c>Tokens</c>.
/// Whitespace tokens and comments are consumed but not emitted; string and char
/// literals are emitted with their surrounding quotes stripped.
/// </summary>
/// <param name="code">The raw source text to tokenize.</param>
/// <exception cref="InvalidOperationException">
/// If the input contains a character no regex in the token tables can match.
/// </exception>
private void Tokenize(string code)
{
    var result = new List<Token>();
    int colTrack = 1;  // 1-based column of the next character to tokenize
    int lineTrack = 1; // 1-based line of the next character to tokenize

    while (code.Length > 0)
    {
        // Drop carriage returns entirely; line tracking is driven by the EOL token.
        if (GetEscapeSequence(code[0]) == @"\u000D")
        {
            code = code.Substring(1);
            continue;
        }

        bool consumed = false;

        // Identifier-leading input uses its own regex table; everything else
        // goes through the general table.
        foreach ((string, TokenInfo.TokenType) regexPair in
                 Char.IsLetter(code[0]) ? TokenInfo.IdenRegexTable : TokenInfo.OtherRegexTable)
        {
            TokenInfo.TokenType tokenType = regexPair.Item2;

            // Regex.Match(input, pattern) uses the framework's static pattern cache,
            // avoiding a fresh Regex allocation per candidate pattern per position.
            Match match = Regex.Match(code, regexPair.Item1);
            if (!match.Success)
            {
                continue;
            }

            int matchLength = match.Length;
            string matchValue = match.Value;

            if (tokenType != TokenInfo.TokenType.SPACE && tokenType != TokenInfo.TokenType.TAB)
            {
                // Strip the opening and closing quote from string/char literals.
                if (tokenType == TokenInfo.TokenType.STRING_LIT || tokenType == TokenInfo.TokenType.CHAR_LIT)
                {
                    matchValue = matchValue.Substring(1).Remove(matchLength - 2);
                }

                // Comments are matched (so they are consumed) but never emitted.
                if (tokenType != TokenInfo.TokenType.SINGLE_LINE_COMMENT
                    && tokenType != TokenInfo.TokenType.MULTI_LINE_COMMENT)
                {
                    result.Add(new Token(matchValue.Trim(), tokenType, new CodePosition(lineTrack, colTrack)));
                }
            }

            if (tokenType == TokenInfo.TokenType.EOL)
            {
                colTrack = 1;
                lineTrack++;
            }
            else
            {
                // NOTE(review): a multi-line comment advances only the column, not the
                // line counter — positions after such a comment may be off; confirm.
                colTrack += matchLength;
            }

            code = code.Substring(matchLength);
            consumed = true;
            break;
        }

        // BUG FIX: previously, a character matched by no pattern left `code`
        // untouched and the while-loop spun forever. Fail fast with the position.
        if (!consumed)
        {
            throw new InvalidOperationException(
                $"Tokenizer: unrecognized character '{code[0]}' at line {lineTrack}, column {colTrack}.");
        }
    }

    Tokens = result.ToArray();
}
/// <summary>
/// Builds a variable-mutation node: apply <paramref name="mutationOp"/> to
/// <paramref name="baseVariable"/> with <paramref name="newValue"/> as the operand.
/// </summary>
/// <param name="mutationOp">The mutation operator token type (e.g. an assignment op).</param>
/// <param name="baseVariable">Expression designating the variable being mutated.</param>
/// <param name="newValue">Expression producing the value applied to the variable.</param>
public VarMutation(TokenInfo.TokenType mutationOp, Expr baseVariable, Expr newValue)
{
    MutationOp = mutationOp;
    BaseVariable = baseVariable;
    NewValue = newValue;
}
/// <summary>
/// Creates a token carrying its text, its classified type, and where it was found.
/// </summary>
/// <param name="value">The (possibly quote-stripped, trimmed) token text.</param>
/// <param name="type">The classified token type.</param>
/// <param name="pos">Line/column position of the token in the source.</param>
public Token(string value, TokenInfo.TokenType type, CodePosition pos)
{
    Pos = pos;
    Type = type;
    Value = value;
}
/// <summary>
/// Parses a delimiter-separated sequence of items: item, delimiter, item, ...
/// Stops at the first fictive parse error; a trailing delimiter with no item
/// after it is reported as a syntax error at the delimiter's position.
/// </summary>
/// <param name="parser">The parser to consume tokens from.</param>
/// <param name="consumer">Parses a single sequence element.</param>
/// <param name="delimiter">Token type separating elements (comma by default).</param>
/// <returns>The parsed elements, possibly empty.</returns>
/// <exception cref="ParserError">On a non-fictive parse error or a trailing delimiter.</exception>
public static T[] ParseSequence <T>(Parser parser, Func <Parser, T> consumer, TokenInfo.TokenType delimiter = TokenInfo.TokenType.COMMA)
{
    var sequence = new List<T>();
    bool lastlyEaten = false;   // true iff the most recent successful step ate a delimiter
    Token lastTokenEaten = null;

    while (true)
    {
        try
        {
            sequence.Add(parser.TryConsumer(consumer));
            lastlyEaten = false;
            lastTokenEaten = parser.Eat(delimiter);
            lastlyEaten = true;
        }
        catch (ParserError ex)
        {
            if (!ex.IsExceptionFictive())
            {
                // BUG FIX: was `throw ex;`, which resets the stack trace.
                // `throw;` rethrows and preserves the original trace.
                throw;
            }
            // Fictive error: the sequence simply ended here.
            break;
        }
    }

    // The sequence ended right after a delimiter: "a, b," is malformed.
    if (lastlyEaten)
    {
        throw new ParserError(
            new UnexpectedTokenException(delimiter),
            lastTokenEaten.Pos
        );
    }

    return sequence.ToArray();
}
/// <summary>
/// Hard parse error: a token of <paramref name="tokenType"/> was required but absent.
/// </summary>
/// <param name="tokenType">The token type that was expected.</param>
public ExpectedTokenException(TokenInfo.TokenType tokenType) => TokenType = tokenType;
/// <summary>
/// Fictive (backtrackable) parse error: an optional eat of <paramref name="tokenType"/> failed.
/// </summary>
/// <param name="tokenType">The token type the failed eat was looking for.</param>
public FailedEatToken(TokenInfo.TokenType tokenType) => TokenType = tokenType;
/// <summary>
/// Curries <c>Parser.Eat</c>: returns a consumer that eats one token of
/// <paramref name="tokenType"/> from whatever parser it is handed.
/// </summary>
/// <param name="tokenType">The token type the returned consumer will eat.</param>
/// <returns>A delegate suitable for use as a sequence/element consumer.</returns>
public static Func <Parser, Token> CurryEat(TokenInfo.TokenType tokenType) => parser => parser.Eat(tokenType);