// Returns the type of the next token without consuming it; the token is cached
// in the 'token' field until NextToken() hands it out.
private PreprocessorTokenType PeekType() {
    if (token == null) {
        token = NextToken();
    }
    return token.Type;
}
// Consumes and returns the next token if it has the expected type; otherwise
// reports a "token expected" error and returns null without consuming it.
private PreprocessorToken Eat(PreprocessorTokenType type) {
    if (PeekType() != type) {
        ReportFormattedError(PreprocessorError.TokenExpected, PreprocessorToken.TypeString(type));
        return null;
    }
    return NextToken();
}
// Builds the keyword lookup table: the interned spelling of every token type
// below Identifier is mapped to its PreprocessorTokenType.
public PreprocessorKeywords(NameTable symbolTable) {
    keywords = new Hashtable();
    for (PreprocessorTokenType token = 0; token < PreprocessorTokenType.Identifier; token += 1) {
        keywords.Add(symbolTable.Add(PreprocessorToken.TypeString(token)), token);
    }
}
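// Illustrative sketch only (not part of the original source): one way the keyword table
// built above could be consulted when the directive scanner reads an identifier. The
// method name "Classify" is hypothetical, and the key type assumes that NameTable.Add()
// returns the interned string it was given.
private PreprocessorTokenType Classify(string internedName) {
    object entry = keywords[internedName];
    // Anything that is not a registered directive keyword falls back to Identifier.
    return (entry != null) ? (PreprocessorTokenType)entry : PreprocessorTokenType.Identifier;
}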
// Returns the look-ahead token buffered by PeekType() if there is one; otherwise
// reads the next token directly from the lexer.
private PreprocessorToken NextToken() {
    if (this.token == null) {
        return lexer.NextToken(text);
    }
    else {
        PreprocessorToken token = this.token;
        this.token = null;
        return token;
    }
}
// Parses the preprocessor directive that starts at the current '#' in the buffer
// and returns a typed PreprocessorLine describing it, or null if it was malformed.
public PreprocessorLine Parse(TextBuffer text, IDictionary defines) {
    this.text = text;
    this.defines = defines;

    Debug.Assert(text.PeekChar() == '#');
    text.NextChar();

    PreprocessorToken token = NextToken();
    PreprocessorTokenType type = token.Type;
    switch (type) {
        case PreprocessorTokenType.Define:
        case PreprocessorTokenType.Undef:
            token = Eat(PreprocessorTokenType.Identifier);
            EatEndOfLine();
            if (token != null) {
                return new PreprocessorDeclarationLine(type, ((PreprocessorIdentifierToken)token).Value);
            }
            else {
                return null;
            }

        case PreprocessorTokenType.Warning:
        case PreprocessorTokenType.Error:
            return new PreprocessorControlLine(type, lexer.GetRestOfLine());

        case PreprocessorTokenType.Line:
            // #line hidden, #line default
            type = PeekType();
            if (type == PreprocessorTokenType.Default || type == PreprocessorTokenType.Hidden) {
                NextToken();
                EatEndOfLine();
                return new PreprocessorLine(type);
            }

            // #line <number> ["file"]
            token = Eat(PreprocessorTokenType.Int);
            if (token != null) {
                int line = ((PreprocessorIntToken)token).Value;
                string file = null;
                if (PeekType() == PreprocessorTokenType.String) {
                    file = ((PreprocessorStringToken)NextToken()).Value;
                }
                EatEndOfLine();
                return new PreprocessorLineNumberLine(line, file);
            }
            else {
                lexer.IgnoreRestOfLine();
                return null;
            }

        case PreprocessorTokenType.If:
        case PreprocessorTokenType.Elif:
            return new PreprocessorIfLine(type, EvalExpression());

        case PreprocessorTokenType.Else:
        case PreprocessorTokenType.Endif:
            return new PreprocessorLine(type);

        case PreprocessorTokenType.Region:
        case PreprocessorTokenType.EndRegion:
            lexer.IgnoreRestOfLine();
            return new PreprocessorLine(type);

        case PreprocessorTokenType.Pragma:
            lexer.IgnoreRestOfLine();
            return new PreprocessorLine(type);

        default:
            ReportError(PreprocessorError.UnexpectedDirective, token.Position);
            return null;
    }
}
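// Illustrative usage sketch only (not part of the original source): a caller that has
// scanned up to a '#' at the start of a line hands the buffer to Parse() and receives a
// typed PreprocessorLine, or null if the directive was malformed (errors have already
// been reported). "parser", "text", and "defines" are assumed to be supplied by the
// surrounding compiler front end.
PreprocessorLine line = parser.Parse(text, defines);
if (line is PreprocessorIfLine) {
    // An #if or #elif directive; its condition was evaluated against 'defines'
    // by EvalExpression() inside Parse().
}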