/// <summary>
/// Advances to the next token and returns it. Tokens form a lazily built
/// linked list, so a node cached by an earlier Peek() is reused instead of
/// re-lexing. Each freshly lexed token is reported to the special tracker.
/// </summary>
public Token NextToken()
{
	// First call: no current token yet, fetch and return the initial one.
	if (curToken == null) {
		curToken = Next();
		specialTracker.InformToken(curToken.kind);
		return curToken;
	}
	lastToken = curToken;
	// Extend the chain only when Peek() has not already cached the successor.
	Token successor = curToken.next;
	if (successor == null) {
		successor = Next();
		specialTracker.InformToken(successor.kind);
		curToken.next = successor;
	}
	curToken = successor;
	return curToken;
}
/// <summary>
/// Consumes one lexer token (flushing its specials) per entry in
/// <paramref name="tokenList"/>, then emits the textual form of each
/// listed token kind separated by spaces.
/// </summary>
/// <param name="tokenList">Token kinds (boxed ints) expected next in the stream.</param>
public void PrintTokenList(ArrayList tokenList)
{
	// The original cloned tokenList and removed one element per iteration
	// via an obfuscated "for { RemoveAt(i); break; }"; a plain countdown is
	// equivalent and avoids the clone.
	int remaining = tokenList.Count;
	while (this.token == null || remaining > 0) {
		this.token = lexer.NextToken();
		PrintSpecials(this.token.kind);
		if (remaining > 0) {
			remaining--;
		}
	}
	foreach (int token in tokenList) {
		text.Append(Tokens.GetTokenString(token));
		Space();
	}
}
/// <summary>
/// Advances the lexer until a token of the requested kind (or end of file,
/// kind &lt;= 0) is reached, flushing specials along the way, then appends
/// the token's textual form to the output.
/// </summary>
/// <param name="token">The token kind to print.</param>
public void PrintToken(int token)
{
	// Semicolons may be suppressed (e.g. when printing embedded statements).
	if (!EmitSemicolon && token == Tokens.Semicolon) {
		return;
	}
	bool matched = false;
	while (!matched && (this.token == null || this.token.kind > 0)) {
		this.token = lexer.NextToken();
		PrintSpecials(this.token.kind);
		matched = this.token.kind == token;
	}
	text.Append(Tokens.GetTokenString(token));
	gotBlankLine = false;
}
/// <summary>
/// Consumes the identifier token from the lexer, flushes its preceding
/// specials, and appends the caller-supplied spelling to the output.
/// </summary>
/// <param name="identifier">The identifier text to emit.</param>
public void PrintIdentifier(string identifier)
{
	this.token = lexer.NextToken();
	PrintSpecials(this.token.kind);
	text.Append(identifier);
}
/// <summary>
/// Drains the remaining token stream to end of file (kind &lt;= 0) so that
/// all trailing specials are flushed, then forces a final flush with the
/// sentinel kind -1.
/// </summary>
public void EndFile()
{
	while (this.token == null || this.token.kind > 0) {
		this.token = lexer.NextToken();
		PrintSpecials(this.token.kind);
	}
	// -1 matches no real token kind, so every remaining special is emitted.
	PrintSpecials(-1);
}
/// <summary>
/// Resets the peek pointer to the current token, so that subsequent calls
/// to Peek() look ahead starting from the current stream position.
/// </summary>
public void StartPeek() { peekToken = curToken; }
/// <summary>
/// Returns the next lookahead token without consuming it from the main
/// stream. Lexes and caches a new node on the token chain when the
/// lookahead position has no successor yet; newly lexed tokens are
/// reported to the special tracker.
/// </summary>
public Token Peek()
{
	Token lookahead = peekToken.next;
	if (lookahead == null) {
		lookahead = Next();
		specialTracker.InformToken(lookahead.kind);
		peekToken.next = lookahead;
	}
	peekToken = lookahead;
	return peekToken;
}
// Parses one overloadable operator token (Coco/R-generated rule from cs.ATG).
// Every accepted token kind is handled identically — consume it — so the
// generated one-case-per-kind switch is collapsed into a single grouped case.
// The matched token is returned via the grammar's current-token variable t.
void OverloadableOperator(
#line 1462 "cs.ATG"
out Token op) {
	switch (la.kind) {
	case 4: case 5: case 22: case 25: case 29: case 30: case 112: case 71:
	case 6: case 7: case 8: case 26: case 27: case 28: case 35: case 36:
	case 31: case 32: case 20: case 21: case 33: case 34: {
		lexer.NextToken();
		break;
	}
	default: SynErr(159); break;
	}

#line 1471 "cs.ATG"
	op = t;
}
/* skip: { "*" | "[" { "," } "]" } */
/* !!! Proceeds from current peek position !!! */
// Skips any run of pointer stars and array-dimension specifiers at the
// current peek position. Returns false on a malformed dimension specifier
// (commas not followed by a closing bracket); pt is left at the first
// token after the skipped sequence.
bool IsPointerOrDims (ref Token pt)
{
	while (true) {
		if (pt.kind == Tokens.OpenSquareBracket) {
			// Consume the commas of a multi-dimensional specifier.
			do {
				pt = Peek();
			} while (pt.kind == Tokens.Comma);
			if (pt.kind != Tokens.CloseSquareBracket) {
				return false;
			}
		} else if (pt.kind != Tokens.Times) {
			// Neither "*" nor "[": the skip sequence ends successfully.
			return true;
		}
		pt = Peek();
	}
}
/* Checks whether the next sequence of tokens is a qualident *
 * and returns the qualident string                          */
/* !!! Proceeds from current peek position !!! */
// On success, qualident holds the dotted name ("a.b.c") and pt is left at
// the first token after it. On failure, qualident may hold the partial
// name read so far.
bool IsQualident (ref Token pt, out string qualident)
{
	// A qualident must begin with an identifier.
	if (pt.kind != Tokens.Identifier) {
		qualident = "";
		return false;
	}
	qualident = pt.val;
	pt = Peek();
	// Accept any number of ".identifier" continuations.
	while (pt.kind == Tokens.Dot) {
		pt = Peek();
		if (pt.kind != Tokens.Identifier) {
			return false;
		}
		qualident += "." + pt.val;
		pt = Peek();
	}
	return true;
}