public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
{
    if (!source.MatchSymbol(_startSymbol, false))
        return null;
    source.Position += _startSymbol.Length;

    while (!source.EOF())
    {
        int firstCharPos = source.Text.IndexOf(_endSymbol, source.Position);
        if (firstCharPos < 0)
        {
            // No end symbol before EOF: a line comment simply runs to the end of
            // the source, while a block comment is unterminated and is an error.
            source.Position = source.Text.Length;
            return _isLineComment
                ? TokenAst.Create(this, context, source.TokenStart, source.GetLexeme())
                : LRParser.CreateSyntaxErrorToken(context, source.TokenStart, "Unclosed comment block");
        }
        source.Position = firstCharPos;
        if (source.MatchSymbol(_endSymbol, false))
        {
            source.Position += _endSymbol.Length;
            return TokenAst.Create(this, context, source.TokenStart, source.GetLexeme());
        }
        source.Position++;
    }
    // Unreachable: every iteration either returns or advances toward EOF,
    // and the IndexOf branch returns once EOF is reached without an end symbol.
    throw new NotSupportedException();
}
private ParserActionType GetActionTypeForOperation(TokenAst current)
{
    ITerminal thisTerm = current.Terminal;
    // Walk down the stack to the nearest operator node and compare precedences.
    for (int i = _stack.Count - 2; i >= 0; i--)
    {
        if (_stack[i].Node == null)
            continue;
        IGrammarTerm term = _stack[i].Node.Term;
        if (!term.IsSet(TermOptions.IsOperator))
            continue;
        // Operator terms are expected to be terminals carrying precedence data.
        ITerminal prevTerm = (ITerminal)term;
        if (prevTerm.Precedence == thisTerm.Precedence)
        {
            // Equal precedence: associativity decides. Left-associative
            // operators group to the left, so reduce; otherwise shift.
            return thisTerm.Associativity == Associativity.Left
                ? ParserActionType.Reduce
                : ParserActionType.Shift;
        }
        // The tighter-binding operator wins: reduce if the operator already on
        // the stack has higher precedence, shift otherwise.
        return prevTerm.Precedence > thisTerm.Precedence
            ? ParserActionType.Reduce
            : ParserActionType.Shift;
    }
    // No operator found on the stack; default to shift.
    return ParserActionType.Shift;
}
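// A minimal standalone sketch of the decision rule above, assuming plain
// integer precedences (the helper below is illustrative, not part of this
// codebase): reduce when the operator already on the stack binds at least as
// tightly as the incoming one, shift otherwise. For "2 + 3 * 4", '+' is on the
// stack when '*' arrives, so the parser shifts; for "2 * 3 + 4" it reduces
// "2 * 3" first.
internal static class PrecedenceSketch
{
    public static ParserActionType Decide(int stackPrecedence, int currentPrecedence, Associativity currentAssociativity)
    {
        if (stackPrecedence == currentPrecedence)
            return currentAssociativity == Associativity.Left ? ParserActionType.Reduce : ParserActionType.Shift;
        return stackPrecedence > currentPrecedence ? ParserActionType.Reduce : ParserActionType.Shift;
    }
}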
protected virtual TokenAst CreateToken(CompilerContext context, ISourceStream source)
{
    string lexeme = source.GetLexeme();
    TokenAst token = TokenAst.Create(this, context, source.TokenStart, lexeme, lexeme);
    return token;
}
private TokenAst MatchTerminals(TerminalList terminals)
{
    TokenAst result = null;
    foreach (Terminal term in terminals)
    {
        // Terminals arrive sorted by priority; once we hold a match from a
        // higher-priority terminal, lower-priority candidates cannot win.
        if (result != null && result.Terminal.Priority > term.Priority)
            break;
        // Reset the position so every terminal tries from the token start.
        _source.Position = _source.TokenStart.Position;
        TokenAst token = term.TryMatch(_context, _source);
        // Prefer error tokens, then the longest successful match.
        if (token != null && (token.IsError() || result == null || token.Length > result.Length))
            result = token;
        if (result != null && result.IsError())
            break;
    }
    return result;
}
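// A standalone sketch of the selection policy above, minus the priority
// cutoff (hypothetical helper, not part of this codebase): an error token
// preempts everything; otherwise the longest match wins. For input ">=",
// both a ">" and a ">=" terminal match, and the two-character token is kept.
internal static class LongestMatchSketch
{
    public static TokenAst PickBest(System.Collections.Generic.IEnumerable<TokenAst> candidates)
    {
        TokenAst best = null;
        foreach (TokenAst token in candidates)
        {
            if (token == null)
                continue;
            if (token.IsError())
                return token; // errors preempt everything
            if (best == null || token.Length > best.Length)
                best = token; // longest match wins
        }
        return best;
    }
}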
private TokenAst ReadToken()
{
    // Serve buffered tokens first (tokens pushed back by a previous step).
    if (_bufferedTokens.Count > 0)
    {
        TokenAst tkn = _bufferedTokens[0];
        _bufferedTokens.RemoveAt(0);
        return tkn;
    }
    SkipWhiteSpaces();
    SetTokenStartLocation();
    if (_source.EOF())
        return TokenAst.Create(Grammar.Eof, _context, _source.TokenStart, string.Empty, Grammar.Eof.Name);
    // Try every terminal that can start with the current character
    // and keep the best match.
    TerminalList terms = SelectTerminals(_source.CurrentChar);
    TokenAst result = MatchTerminals(terms);
    if (result != null && !result.IsError())
    {
        _source.Position = _source.TokenStart.Position + result.Length;
        return result;
    }
    if (result == null)
        result = Grammar.CreateSyntaxErrorToken(_context, _source.TokenStart, "Invalid character: '{0}'", _source.CurrentChar);
    return result;
}
protected override TokenAst CreateToken(CompilerContext context, ISourceStream source)
{
    TokenAst token = base.CreateToken(context, source);
    // Attach the parsed numeric value alongside the raw text.
    token.Value = ConvertNumber(token.Text);
    return token;
}
internal static TokenAst CreateSyntaxErrorToken(CompilerContext context, SourceLocation location, string message, params object[] args)
{
    if (args != null && args.Length > 0)
        message = string.Format(message, args);
    return TokenAst.Create(SyntaxError, context, location, message);
}
public void Prepare(CompilerContext context, ISourceStream source)
{
    _context = context;
    _source = source;
    _currentToken = null;
    _bufferedTokens.Clear();
    ResetSource();
}
private void NextToken()
{
    _currentToken = ReadToken();
    // ReadToken returns null once the input is exhausted; substitute an
    // explicit EOF token so the parser always sees a terminal.
    if (_currentToken == null)
        _currentToken = TokenAst.Create(LRParser.Eof, _context, new SourceLocation(0, _currentLine - 1, 0), string.Empty);
}
protected override TokenAst CreateToken(CompilerContext context, ISourceStream source)
{
    TokenAst token = base.CreateToken(context, source);
    // Flag identifiers that collide with reserved words.
    if (Keywords.Contains(token.Text))
        token.IsKeyword = true;
    return token;
}
public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
{
    if (!source.MatchSymbol(Symbol, false))
        return null;
    source.Position += Symbol.Length;
    TokenAst tokenAst = TokenAst.Create(this, context, source.TokenStart, Symbol);
    return tokenAst;
}
public static TokenAst Create(ITerminal term, CompilerContext context, SourceLocation location, string text, object value)
{
    int textLen = text == null ? 0 : text.Length;
    var span = new SourceSpan(location, textLen);
    var args = new AstNodeArgs(term, span, null);
    var token = new TokenAst(args) { Text = text, Value = value };
    return token;
}
public IEnumerable<TokenAst> BeginScan()
{
    // Lazily stream tokens; the enumeration ends after the EOF token is yielded.
    while (true)
    {
        _currentToken = ReadToken();
        yield return _currentToken;
        if (_currentToken.Terminal == Grammar.Eof)
            yield break;
    }
}
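// A hedged usage sketch: the Scanner type name below is an assumption (this
// excerpt shows only its members). Prepare binds the context and source, then
// BeginScan streams tokens lazily until the Eof token closes the sequence.
internal static class ScannerUsageSketch
{
    public static void DumpTokens(Scanner scanner, CompilerContext context, ISourceStream source)
    {
        scanner.Prepare(context, source);
        foreach (TokenAst token in scanner.BeginScan())
            System.Console.WriteLine("{0}: '{1}'", token.Terminal.Name, token.Text);
    }
}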
private TokenAst ReadToken()
{
    while (_input.MoveNext())
    {
        TokenAst result = _input.Current;
        _tokenCount++;
        _currentLine = result.Span.Start.Line + 1;
        // Skip non-grammar tokens such as comments.
        if (result.Terminal.IsSet(TermOptions.IsNonGrammar))
            continue;
        return result;
    }
    // Input exhausted; the caller substitutes an EOF token (see NextToken).
    return null;
}
public override TokenAst TryMatch(CompilerContext context, ISourceStream source)
{
    bool isVerbatim = false;
    int start = source.Position;
    if (source.CurrentChar == '@')
    {
        // Verbatim strings (@"...") take no escape sequences.
        isVerbatim = true;
        source.Position++;
        start++;
    }
    if (IsCurrentQuote(source))
    {
        source.Position++;
        start++; // body starts just past the opening quote
    }
    else
        return null;

    while (!source.EOF())
    {
        if (!isVerbatim)
        {
            if (source.CurrentChar == '\\')
            {
                //TODO: Escape processing
                source.Position += 2; // skip the escape pair
                continue;
            }
            // A bare line terminator inside a non-verbatim string is not allowed.
            if (LRParser.LineTerminators.IndexOf(source.CurrentChar) >= 0)
                return null;
        }
        if (IsCurrentQuote(source))
            break;
        source.Position++;
    }
    if (IsCurrentQuote(source))
        source.Position++; // consume the closing quote
    else
        return null;

    string lexeme = source.GetLexeme();
    // The body excludes the closing quote; 'start' already excludes the opening one.
    string body = source.Text.Substring(start, source.Position - start - 1);
    //TODO: handle this in escape processing
    if (!isVerbatim)
        body = body.Replace("\\'", "'").Replace("\\\"", "\"").Replace("\\\\", "\\");
    TokenAst token = TokenAst.Create(this, context, source.TokenStart, lexeme, body);
    return token;
}
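// The TODOs above defer escape processing to a chain of Replace calls, which
// handles only quote and backslash escapes; sequences like \n or \t pass
// through as literal text. A minimal single-pass sketch of fuller decoding
// (illustrative only, not this library's implementation; unknown escapes are
// kept without their backslash):
internal static class EscapeSketch
{
    public static string Decode(string body)
    {
        var sb = new System.Text.StringBuilder(body.Length);
        for (int i = 0; i < body.Length; i++)
        {
            char c = body[i];
            if (c != '\\' || i == body.Length - 1)
            {
                sb.Append(c);
                continue;
            }
            char next = body[++i]; // consume the character after the backslash
            switch (next)
            {
                case 'n': sb.Append('\n'); break;
                case 'r': sb.Append('\r'); break;
                case 't': sb.Append('\t'); break;
                case '0': sb.Append('\0'); break;
                default: sb.Append(next); break; // \\, \', \" and unknown escapes
            }
        }
        return sb.ToString();
    }
}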