/// <summary>
/// Returns the next token, and advances the tokenizer.
/// </summary>
/// <returns>The next token, or <c>null</c> at end of file.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when a close bracket is read with no matching open bracket on the stack.
/// </exception>
public TokenExt Next()
{
    if (_tokenizer.IsEndOfFile)
    {
        return null;
    }

    var token = _tokenizer.GetNextToken();
    if (token.Kind == TokenKind.EndOfFile)
    {
        return null;
    }

    var tokenSpan = _tokenizer.TokenSpan;
    var line = _tokenizer.CurrentLine;
    var lineStart = GetLineStartIndex(line);
    // A token whose span starts before the current line's first character
    // must have begun on an earlier line, i.e. it spans multiple lines.
    var isMultiLine = tokenSpan.Start < lineStart;

    var tokenExt = new TokenExt(
        token,
        _tokenizer.PreceedingWhiteSpace,
        tokenSpan,
        line,
        isMultiLine,
        Current
    );

    if (tokenExt.IsClose)
    {
        if (_insides.Count == 0 || !_insides.Peek().MatchesClose(tokenExt))
        {
            // Use a specific exception type rather than the bare Exception base
            // (still caught by any caller catching Exception).
            throw new InvalidOperationException($"Close bracket ({token.Kind}) has no matching open");
        }
        _insides.Pop();
    }
    else if (tokenExt.Kind == TokenKind.Colon
        && _insides.Count != 0
        && _insides.Peek().Kind == TokenKind.KeywordLambda)
    {
        // A colon following a lambda keyword ends that lambda's header
        // (presumably Python's `lambda ...:`), so it stops enclosing tokens.
        _insides.Pop();
    }

    // Whatever now sits on top of the stack encloses this token.
    if (_insides.TryPeek(out TokenExt inside))
    {
        tokenExt.Inside = inside;
    }

    // Open brackets and lambda keywords begin a new enclosing scope.
    if (tokenExt.IsOpen || tokenExt.Kind == TokenKind.KeywordLambda)
    {
        _insides.Push(tokenExt);
    }

    // Link the doubly-linked token chain forward.
    if (Current != null)
    {
        Current.Next = tokenExt;
    }
    Current = tokenExt;

    return tokenExt;
}
/// <summary>
/// Creates an extended token wrapping <paramref name="token"/>, recording its
/// source position, the whitespace that preceded it, and a link back to the
/// previously read token.
/// </summary>
public TokenExt(Token token, string precedingWhitespace, IndexSpan span, int line, bool isMultiLine, TokenExt prev)
{
    Token = token;
    Span = span;
    Line = line;
    Prev = prev;
    PrecedingWhitespace = precedingWhitespace;
    // Only string tokens are flagged as multiline strings.
    IsMultilineString = IsString && isMultiLine;
}
/// <summary>
/// Returns the next token without advancing the tokenizer. Note that the
/// returned token's Next will not be set until the tokenizer actually reads
/// that next token.
/// </summary>
/// <returns>The next token, or <c>null</c> at end of file.</returns>
public TokenExt Peek()
{
    // Pull one token ahead lazily and cache it until Next() consumes it.
    if (_peeked == null)
    {
        _peeked = Next();
    }
    return _peeked;
}
/// <summary>
/// Determines whether <paramref name="other"/> is the close bracket that
/// pairs with this token's open bracket kind.
/// </summary>
/// <param name="other">The candidate closing token.</param>
/// <returns>
/// True when this token is an open brace/bracket/parenthesis and
/// <paramref name="other"/> is the corresponding close token; otherwise false.
/// </returns>
public bool MatchesClose(TokenExt other)
{
    var closeKind = other.Kind;

    if (Kind == TokenKind.LeftBrace)
    {
        return closeKind == TokenKind.RightBrace;
    }

    if (Kind == TokenKind.LeftBracket)
    {
        return closeKind == TokenKind.RightBracket;
    }

    if (Kind == TokenKind.LeftParenthesis)
    {
        return closeKind == TokenKind.RightParenthesis;
    }

    // Non-bracket tokens never match a close.
    return false;
}
/// <summary>
/// Records <paramref name="token"/> in the per-line token index.
/// </summary>
/// <param name="token">The token to index by line.</param>
private void AddToken(TokenExt token)
{
    var line = token.Line;

    if (token.Kind == TokenKind.ExplicitLineJoin)
    {
        // Explicit line joins ("\") appear at the end of a line, but their
        // span ends on the following line, so shift back one line to group
        // the token with the line it visually belongs to.
        line--;
    }

    if (_lineTokens.TryGetValue(line, out List<TokenExt> existing))
    {
        existing.Add(token);
    }
    else
    {
        _lineTokens.Add(line, new List<TokenExt> { token });
    }
}
/// <summary>
/// Returns the next token, and advances the tokenizer.
/// </summary>
/// <returns>The next token</returns>
public TokenExt Next()
{
    if (_tokenizer.IsEndOfFile) { return(null); }
    var token = _tokenizer.GetNextToken();
    if (token.Kind == TokenKind.EndOfFile) { return(null); }
    var tokenSpan = _tokenizer.TokenSpan;
    var line = _tokenizer.CurrentLine;
    var lineStart = GetLineStartIndex(line);
    // A token whose span starts before the current line's first character
    // must have begun on an earlier line, i.e. it spans multiple lines.
    var isMultiLine = tokenSpan.Start < lineStart;
    var tokenExt = new TokenExt(
        token,
        _tokenizer.PrecedingWhiteSpace,
        tokenSpan,
        line,
        isMultiLine,
        Current
    );
    if (tokenExt.IsClose)
    {
        if (_insides.Count > 0 && _insides.Peek().MatchesClose(tokenExt))
        {
            // Matched pair: the enclosing open bracket is now closed.
            _insides.Pop();
        }
        else
        {
            // This close doesn't match, so assume that the token is just a stray
            // and do nothing to the _insides stack.
            // Keep track of the first unmatched token to present back to the user.
            if (Unmatched == null) { Unmatched = tokenExt; }
        }
    }
    else if (tokenExt.Kind == TokenKind.Colon && _insides.Count != 0 && _insides.Peek().Kind == TokenKind.KeywordLambda)
    {
        // A colon following a lambda keyword ends that lambda's header
        // (presumably Python's `lambda ...:`), so it stops enclosing tokens.
        _insides.Pop();
    }
    // Whatever now tops the stack (after any pops above) encloses this token.
    if (_insides.TryPeek(out TokenExt inside)) { tokenExt.Inside = inside; }
    // Open brackets and lambda keywords begin a new enclosing scope; pushed
    // after the Inside assignment so a token never encloses itself.
    if (tokenExt.IsOpen || tokenExt.Kind == TokenKind.KeywordLambda) { _insides.Push(tokenExt); }
    // Link the doubly-linked token chain forward.
    if (Current != null) { Current.Next = tokenExt; }
    Current = tokenExt;
    return(tokenExt);
}
/// <summary>
/// Appends <paramref name="token"/> to <paramref name="builder"/>, ensuring a
/// whitespace character separates the token on both sides.
/// </summary>
/// <param name="builder">The builder receiving the token text.</param>
/// <param name="token">The token to append.</param>
private static void AppendTokenEnsureWhiteSpacesAround(StringBuilder builder, TokenExt token)
{
    builder.EnsureEndsWithWhiteSpace();
    builder.Append(token);
    builder.EnsureEndsWithWhiteSpace();
}
/// <summary>
/// Returns the next token, and advances the tokenizer. Note that the
/// returned token's Next will not be set until the tokenizer actually
/// reads that next token.
/// </summary>
/// <returns>The next token, or <c>null</c> at end of file.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when a close bracket is read with no matching open bracket on the stack.
/// </exception>
public TokenExt Next()
{
    // A previously peeked token is consumed before reading from the tokenizer.
    if (_peeked != null)
    {
        var tmp = _peeked;
        _peeked = null;
        return tmp;
    }

    if (_tokenizer.IsEndOfFile)
    {
        return null;
    }

    var token = _tokenizer.GetNextToken();
    if (token.Kind == TokenKind.EndOfFile)
    {
        return null;
    }

    var tokenSpan = _tokenizer.TokenSpan;
    var sourceSpan = new SourceSpan(_tokenizer.IndexToLocation(tokenSpan.Start), _tokenizer.IndexToLocation(tokenSpan.End));

    var tokenExt = new TokenExt
    {
        Token = token,
        PreceedingWhitespace = _tokenizer.PreceedingWhiteSpace,
        Span = sourceSpan,
        Prev = _prev
    };

    if (tokenExt.IsClose)
    {
        if (_insides.Count == 0 || !_insides.Peek().MatchesClose(tokenExt))
        {
            // Use a specific exception type rather than the bare Exception base
            // (still caught by any caller catching Exception).
            throw new InvalidOperationException($"Close bracket ({token.Kind}) has no matching open");
        }
        _insides.Pop();
    }
    else if (tokenExt.Kind == TokenKind.Colon
        && _insides.Count != 0
        && _insides.Peek().Kind == TokenKind.KeywordLambda)
    {
        // A colon following a lambda keyword ends that lambda's header
        // (presumably Python's `lambda ...:`), so it stops enclosing tokens.
        _insides.Pop();
    }

    // Whatever now sits on top of the stack encloses this token.
    if (_insides.TryPeek(out TokenExt inside))
    {
        tokenExt.Inside = inside;
    }

    // Open brackets and lambda keywords begin a new enclosing scope.
    if (tokenExt.IsOpen || tokenExt.Kind == TokenKind.KeywordLambda)
    {
        _insides.Push(tokenExt);
    }

    // Link the doubly-linked token chain forward.
    if (_prev != null)
    {
        _prev.Next = tokenExt;
    }
    _prev = tokenExt;

    return tokenExt;
}