/// <summary>
/// ANTLR-generated rule: parses a STRING_LITERAL ':' type_name pair and
/// registers the key under the given declaration collection.
/// </summary>
/// <param name="collection">Receives a new definition keyed by the string literal's text.</param>
protected void pairvalue( IDeclarationCollection collection ) //throws RecognitionException, TokenStreamException
{ Token keyToken = null; DefinitionBase definition = null; try { // for error handling
// "key" ':' type — lexical info is taken from the key token.
keyToken = LT(1); match(STRING_LITERAL); match(COLON); String key = keyToken.getText(); definition = collection.Add(key, ToLexicalInfo(keyToken)); type_name(definition); } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_6_); } }
/// <summary>
/// ANTLR-generated rule: parses an INCLUDE mixin declaration and adds the
/// resulting MixinDefinition (with its type reference) to the aspect.
/// </summary>
protected void include( AspectDefinition aspect ) //throws RecognitionException, TokenStreamException
{ Token i = null; TypeReference tr = null; MixinDefinition md; try { // for error handling
i = LT(1); match(INCLUDE); md = new MixinDefinition(ToLexicalInfo(i)); tr = type_name_or_ref(); md.TypeReference = tr; aspect.Mixins.Add(md); } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_10_); } }
/// <summary>
/// ANTLR-generated rule: parses ADVICEINTERCEPTOR LCURLY type RCURLY and adds
/// an InterceptorDefinition to the pointcut. Note this grammar's lexer maps
/// '(' / ')' to the LCURLY / RCURLY token types (see nextToken and mRCURLY).
/// </summary>
protected void advice( PointCutDefinition pointcut ) //throws RecognitionException, TokenStreamException
{ Token i = null; TypeReference tr = null; InterceptorDefinition interDef = null; try { // for error handling
i = LT(1); match(ADVICEINTERCEPTOR); interDef = new InterceptorDefinition(ToLexicalInfo(i)); match(LCURLY); tr = type_name_or_ref(); interDef.TypeReference = tr; pointcut.Advices.Add(interDef); match(RCURLY); } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_13_); } }
/// <summary>
/// ANTLR-generated lexer rule: matches a "//" line comment followed by any
/// run of characters in tokenSet_0_ (presumably everything but the line
/// terminator — TODO confirm from the grammar). The type is then forced to
/// Token.SKIP, so the token-creation tail below never fires for this rule.
/// </summary>
public void mCOMMENT(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = COMMENT; match("//"); { // ( ... )*
for (;;) { if ((tokenSet_0_.member(LA(1)))) { { match(tokenSet_0_); } } else { goto _loop15_breakloop; } } _loop15_breakloop :; } // ( ... )*
_ttype = Token.SKIP; if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
/// <summary>
/// Verifies the indent filter on a class with two nested method bodies:
/// each deeper indent emits INDENT, each pop emits DEDENT, and logical
/// end-of-statement markers (EOS) are synthesized from blank-line runs.
/// </summary>
public void TestClass() { Token[] tokens = new Token[] { new SimpleToken(TEXT, "class"), new SimpleToken(WS, " \t"), new SimpleToken(TEXT, "foo:"), new SimpleToken(WS, "\n\t"),// i
new SimpleToken(TEXT, "def foo():"), new SimpleToken(WS, "\n\t\t"), // i
new SimpleToken(TEXT, "pass"), new SimpleToken(WS, "\n\t\n\n\t"), // eos, d
new SimpleToken(TEXT, "def bar():"), new SimpleToken(WS, "\n\t\t"), // i
new SimpleToken(TEXT, "pass"), new Token(Token.EOF_TYPE) // eos, d, d
}; AssertTokenSequence(tokens, TEXT, TEXT, INDENT, TEXT, INDENT, TEXT, EOS, DEDENT, TEXT, INDENT, TEXT, EOS, DEDENT, DEDENT, Token.EOF_TYPE); }
/// <summary>
/// ANTLR-generated rule: parses POINTCUT flags target advices END, creating a
/// PointCutDefinition and attaching it to the aspect before its sub-rules run.
/// </summary>
protected void pointcut( AspectDefinition aspect ) //throws RecognitionException, TokenStreamException
{ Token p = null; PointCutDefinition pointcut = null; PointCutFlags flags = PointCutFlags.Unspecified; try { // for error handling
p = LT(1); match(POINTCUT); flags = pointcutflags(); pointcut = new PointCutDefinition(ToLexicalInfo(p), flags); aspect.PointCuts.Add(pointcut); pointcuttarget(pointcut); advices(pointcut); match(END); } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_11_); } }
/// <summary>
/// Initializes this node from a token that carries hidden-token links,
/// copying the hidden-before/hidden-after chain in addition to the base state.
/// </summary>
/// <param name="tok">Must actually be a CommonHiddenStreamToken.</param>
public override void initialize(Token tok)
{
    // The incoming token is expected to be a hidden-stream token; the cast
    // will throw InvalidCastException otherwise (same as the original code).
    CommonHiddenStreamToken hiddenTok = (CommonHiddenStreamToken) tok;
    base.initialize(hiddenTok);
    // Carry over the surrounding hidden-token chain (order-independent copies).
    hiddenAfter = hiddenTok.getHiddenAfter();
    hiddenBefore = hiddenTok.getHiddenBefore();
}
/// <summary>
/// Appends a token at the logical end of the circular buffer, growing the
/// backing array first when it is full.
/// </summary>
/// <param name="tok">The token to add.</param>
public void append(Token tok)
{
    if (nbrEntries == buffer.Length)
    {
        expand();
    }
    // sizeLessOne is (capacity - 1) for a power-of-two capacity, so the
    // bitwise AND wraps the physical index around the ring.
    int insertAt = (offset + nbrEntries) & sizeLessOne;
    buffer[insertAt] = tok;
    nbrEntries++;
}
/// <summary>
/// ANTLR-generated rule: parses identifier ( IN identifier )? into a
/// TypeReference; the optional IN clause becomes an AssemblyReference.
/// The empty cases (END, INCLUDE, POINTCUT, SEMI, RBRACK, RCURLY) are the
/// follow set where the assembly clause is simply absent.
/// </summary>
/// <returns>The populated TypeReference (fields may be incomplete after a recognition error).</returns>
protected TypeReference type_name_def() //throws RecognitionException, TokenStreamException
{ TypeReference type; Token i = null; type = new TypeReference(); String typeToken = null; String assemblyToken = null; try { // for error handling
typeToken = identifier(); type.TypeName = typeToken; { switch (LA(1)) { case IN: { i = LT(1); match(IN); assemblyToken = identifier(); type.AssemblyReference = new AssemblyReference(ToLexicalInfo(i), assemblyToken); break; } case END: case INCLUDE: case POINTCUT: case SEMI: case RBRACK: case RCURLY: { break; } default: { throw new NoViableAltException(LT(1), getFilename()); } } } } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_8_); } return(type); }
/// <summary>
/// ANTLR-generated rule: parses IMPORT namespace ( IN assembly )? and adds an
/// ImportDirective to conf.Imports. Note the optional AssemblyReference reuses
/// the lexical info of the IMPORT keyword token (i), not of the IN token.
/// </summary>
protected void import_directive( EngineConfiguration conf ) //throws RecognitionException, TokenStreamException
{ Token i = null; String ns; String assemblyName; ImportDirective import = null; try { // for error handling
i = LT(1); match(IMPORT); ns = identifier(); import = new ImportDirective(ToLexicalInfo(i), ns); conf.Imports.Add(import); { switch (LA(1)) { case IN: { match(IN); assemblyName = identifier(); import.AssemblyReference = new AssemblyReference(ToLexicalInfo(i), assemblyName); break; } case EOF: case ASPECT: case IMPORT: case MIXINS: case INTERCEPTORS: { break; } default: { throw new NoViableAltException(LT(1), getFilename()); } } } } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_1_); } }
/// <summary>
/// ANTLR-generated lexer rule: matches a double-quoted string. The _saveIndex
/// trick truncates `text` after each delimiter match so the surrounding quotes
/// are excluded from the token text; a doubled quote ("") keeps one quote in
/// the text and drops the other. The closing quote is optional here (the empty
/// else branch) — unterminated strings are not rejected by this rule.
/// </summary>
public void mSTRING_LITERAL(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = STRING_LITERAL; int _saveIndex = 0; _saveIndex = text.Length; match('"'); text.Length = _saveIndex; { // ( ... )*
for (;;) { if ((LA(1) == '"') && (LA(2) == '"')) { match('"'); _saveIndex = text.Length; match('"'); text.Length = _saveIndex; } else if ((tokenSet_1_.member(LA(1)))) { { match(tokenSet_1_); } } else { goto _loop24_breakloop; } } _loop24_breakloop :; } // ( ... )*
{ if ((LA(1) == '"')) { _saveIndex = text.Length; match('"'); text.Length = _saveIndex; } else { } } if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
/// <summary>
/// ANTLR-generated lexer rule for RCURLY. Deliberately matches ')' — this
/// grammar names its grouping tokens LCURLY/RCURLY but spells them with
/// parentheses (the nextToken dispatch maps '(' to mLCURLY and ')' here).
/// </summary>
public void mRCURLY(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = RCURLY; match(')'); if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
/// <summary>
/// ANTLR-generated protected (fragment) lexer rule: matches a single decimal
/// digit '0'..'9'. Called from mINTLIT; normally invoked with _createToken
/// false so no DIGIT token is emitted on its own.
/// </summary>
protected void mDIGIT(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = DIGIT; matchRange('0', '9'); if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
/// <summary>
/// ANTLR-generated rule: parses a dotted identifier (ID ( '.' ID )*) and
/// returns its full text. Accumulates into the shared sbuilder field, which is
/// reset on entry; the first `value = sbuilder.ToString()` is an intermediate
/// generated assignment, superseded by the one after the loop.
/// </summary>
/// <returns>The dotted name, or the partial text consumed before a recognition error.</returns>
protected String identifier() //throws RecognitionException, TokenStreamException
{ String value; Token id = null; Token id2 = null; value = null; sbuilder.Length = 0; try { // for error handling
id = LT(1); match(ID); sbuilder.Append(id.getText()); value = sbuilder.ToString(); { // ( ... )*
for (;;) { if ((LA(1) == DOT)) { match(DOT); id2 = LT(1); match(ID); sbuilder.Append('.'); sbuilder.Append(id2.getText()); } else { goto _loop65_breakloop; } } _loop65_breakloop :; } // ( ... )*
value = sbuilder.ToString(); } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_5_); } return(value); }
/// <summary>
/// ANTLR-generated rule: a type is either a STRING_LITERAL (stored as a
/// symbolic LinkRef to be resolved later) or an ID leading a full
/// type_name_def. Returns null if a recognition error is reported.
/// </summary>
protected TypeReference type_name_or_ref() //throws RecognitionException, TokenStreamException
{ TypeReference type; Token refTypeToken = null; type = null; try { // for error handling
switch (LA(1)) { case STRING_LITERAL: { refTypeToken = LT(1); match(STRING_LITERAL); type = new TypeReference(); type.LinkRef = refTypeToken.getText(); break; } case ID: { type = type_name_def(); break; } default: { throw new NoViableAltException(LT(1), getFilename()); } } } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_7_); } return(type); }
/// <summary>
/// Driver for manual lexer testing: tokenizes standard input with lexer T,
/// printing every token (including the final EOF token) to stdout, and
/// reporting any failure on stderr.
/// </summary>
public static void Main(string[] args)
{
    try
    {
        T lexer = new T(new CharBuffer(Console.In));
        while (true)
        {
            Token tok = lexer.nextToken();
            Console.Out.WriteLine("Token: " + tok);
            if (tok.Type == Token.EOF_TYPE)
            {
                break; // EOF token has been printed; stop lexing.
            }
        }
        Console.Out.WriteLine("done lexing...");
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("exception: " + e);
    }
}
/// <summary>
/// ANTLR-generated lexer rule: matches a single-quoted character literal of
/// exactly one non-EOF character. Both quote delimiters are stripped from the
/// token text via the _saveIndex truncation trick; no escape sequences are
/// supported by this rule.
/// </summary>
public void mCHARLIT(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = CHARLIT; int _saveIndex = 0; _saveIndex = text.Length; match('\''); text.Length = _saveIndex; matchNot(EOF /*_CHAR*/); _saveIndex = text.Length; match('\''); text.Length = _saveIndex; if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
/// <summary>
/// ANTLR-generated lexer rule: matches one or more decimal digits (via the
/// mDIGIT fragment). The _cnt19 counter enforces the "at least one" part:
/// breaking out before a single digit was consumed raises
/// NoViableAltForCharException.
/// </summary>
public void mINTLIT(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = INTLIT; { // ( ... )+
int _cnt19 = 0; for (;;) { if (((LA(1) >= '0' && LA(1) <= '9'))) { mDIGIT(false); } else { if (_cnt19 >= 1) { goto _loop19_breakloop; } else { throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());; } } _cnt19++; } _loop19_breakloop :; } // ( ... )+
if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
/// <summary>Creates a CommonAST node whose text and type come from the given token.</summary>
public CommonAST(Token tok) { initialize(tok); }
/// <summary>
/// Doubles the capacity of the circular token buffer, unwrapping its contents
/// so that the first logical element lands at physical index 0 of the new
/// array. nbrEntries is intentionally left untouched.
/// </summary>
private void expand()
{
    int oldCapacity = buffer.Length;
    Token[] grown = new Token[oldCapacity * 2];
    // elementAt() reads in logical order, which flattens any wraparound
    // present in the old ring.
    for (int logical = 0; logical < oldCapacity; ++logical)
    {
        grown[logical] = elementAt(logical);
    }
    buffer = grown;
    sizeLessOne = buffer.Length - 1; // mask for power-of-two wrapping
    offset = 0;                      // contents now start at physical 0
}
/// <summary>Initializes this AST node's text and type from the given token.</summary>
public override void initialize(Token tok) { setText(tok.getText()); setType(tok.Type); }
/// <summary>
/// Creates a new AST node and initializes it from the given token.
/// </summary>
/// <param name="tok">Token used to initialize the newly created node.</param>
/// <param name="ASTNodeTypeName">
/// Fully qualified name of the <see cref="System.Type"/> to instantiate;
/// it alone determines the node's concrete type and must expose a
/// parameterless constructor.
/// </param>
/// <returns>The newly created, token-initialized AST node.</returns>
public virtual AST create(Token tok, string ASTNodeTypeName)
{
    AST node = createFromNodeName(ASTNodeTypeName);
    node.initialize(tok);
    return node;
}
/// <summary>Creates a hidden-token-aware AST node initialized from the given token.</summary>
public CommonASTWithHiddenTokens(Token tok) : base(tok) { }
/// <summary>
/// Verifies that dropping from two indentation levels straight back to column
/// zero emits two consecutive DEDENT tokens before the following TEXT token.
/// </summary>
public void TestMultipleDedent() { Token[] tokens = new Token[] { new SimpleToken(TEXT, "class Math:"), new SimpleToken(WS, "\n\t"), new SimpleToken(TEXT, "def foo:"), new SimpleToken(WS, "\n\t\t"), new SimpleToken(TEXT, "pass"), new SimpleToken(WS, "\n"), new SimpleToken(TEXT, "print(3)"), new Token(Token.EOF_TYPE) }; AssertTokenSequence(tokens, TEXT, INDENT, TEXT, INDENT, TEXT, EOS, DEDENT, DEDENT, TEXT, EOS, Token.EOF_TYPE); }
/// <summary>
/// Raised when the parser finds no viable alternative at token <paramref name="t"/>;
/// position information is taken from the offending token itself.
/// </summary>
public NoViableAltException(Token t, string fileName_) : base("NoViableAlt", fileName_, t.getLine(), t.getColumn()) { token = t; }
/// <summary>
/// Feeds the given tokens through an IndentTokenStreamFilter (via a FakeStream
/// backed by a queue) and asserts that the filtered token types come out in
/// exactly the expected order; failures report the offending sequence index.
/// </summary>
void AssertTokenSequence(Token[] tokens, params int[] expectedSequence)
{
    // Queue(ICollection) copies the array in order — same contents as
    // enqueueing each token individually.
    Queue queue = new Queue(tokens);
    IndentTokenStreamFilter stream = new IndentTokenStreamFilter(new FakeStream(queue), WS, INDENT, DEDENT, EOS);
    for (int i = 0; i < expectedSequence.Length; ++i)
    {
        Assert.AreEqual(expectedSequence[i], stream.nextToken().Type, "sequence item: " + i);
    }
}
// Expected BitSet / not BitSet
/// <summary>
/// Mismatch against a token-type set: the parser expected (or, with matchNot,
/// expected anything but) a member of <paramref name="set_"/>.
/// </summary>
public MismatchedTokenException(string[] tokenNames_, Token token_, BitSet set_, bool matchNot, string fileName_) : base("Mismatched Token", fileName_, token_.getLine(), token_.getColumn()) { tokenNames = tokenNames_; token = token_; tokenText = token_.getText(); mismatchType = matchNot ? TokenTypeEnum.NotSetType : TokenTypeEnum.SetType; bset = set_; }
/// <summary>
/// Verifies that consecutive whitespace tokens (e.g. around a skipped comment
/// line) are folded correctly: only the last indentation level before real
/// text determines the INDENT/DEDENT output.
/// </summary>
public void TestWhitespaceWithSkipInBetween() { /* a: b: c // comment d */ Token[] tokens = new Token[] { new SimpleToken(TEXT, "a:"), new SimpleToken(WS, "\n\t"), new SimpleToken(TEXT, "b:"), new SimpleToken(WS, "\n\t\t"), new SimpleToken(TEXT, "c"), new SimpleToken(WS, "\n"), new SimpleToken(WS, "\n\t"), new SimpleToken(TEXT, "d"), new SimpleToken(WS, "\n"), new Token(Token.EOF_TYPE) }; AssertTokenSequence(tokens, TEXT, INDENT, TEXT, INDENT, TEXT, EOS, DEDENT, TEXT, EOS, DEDENT, EOS, Token.EOF_TYPE); }
/// <summary>Initializes this AST node from the given token; implemented by concrete node types.</summary>
public abstract void initialize(Token t);
/// <summary>
/// ANTLR-generated rule: parses an identifier-based pattern, optionally
/// followed by WS or by ".*" (DOT ALL, appended as ".*" to the result).
/// A bare ALL next is only accepted when <paramref name="allowALL"/> is true.
/// NOTE(review): in the default branch, the `else if` tests the exact same
/// condition (tokenSet_16_.member(LA(1))) as the preceding `if`, so the empty
/// else-if body is unreachable — likely a generated-code artifact; confirm
/// against the grammar before changing.
/// </summary>
/// <returns>The pattern text accumulated in the shared sbuilder.</returns>
protected String reg_ex( Boolean allowALL ) //throws RecognitionException, TokenStreamException
{ String value; Token id = null; value = null; sbuilder.Length = 0; try { // for error handling
{ id = LT(1); match(ID); sbuilder.Append(id.getText()); value = sbuilder.ToString(); { switch (LA(1)) { case WS: { match(WS); break; } case DOT: { match(DOT); sbuilder.Append('.'); match(ALL); sbuilder.Append('*'); break; } default: if ((tokenSet_16_.member(LA(1)))) { if (LA(1) == ALL) { if (allowALL) { break; } throw new NoViableAltException(LT(1), getFilename()); } } else if ((tokenSet_16_.member(LA(1)))) { } else { throw new NoViableAltException(LT(1), getFilename()); } break; } } } value = sbuilder.ToString(); } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_16_); } return(value); }
/// <summary>
/// ANTLR-generated lexer rule: matches one whitespace character — space, tab,
/// form feed, or a line terminator ("\r\n", "\r", or "\n", with newline()
/// called to keep the line counter accurate). The type is forced to
/// Token.SKIP, so whitespace never reaches the token stream.
/// </summary>
public void mWS(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = WS; { switch (LA(1)) { case ' ': { match(' '); break; } case '\t': { match('\t'); break; } case '\u000c': { match('\f'); break; } case '\n': case '\r': { { if ((LA(1) == '\r') && (LA(2) == '\n')) { match("\r\n"); } else if ((LA(1) == '\r') && (true)) { match('\r'); } else if ((LA(1) == '\n')) { match('\n'); } else { throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn()); } } newline(); break; } default: { throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn()); } } } _ttype = Token.SKIP; if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
/// <summary>
/// ANTLR-generated lexer entry point: dispatches on the lookahead character to
/// the appropriate mXXX rule, loops past SKIP tokens (whitespace/comments),
/// emits an EOF token at end of input, and — unless committed to a path —
/// recovers from recognition errors by consuming one character and retrying.
/// Char-stream failures are rethrown as TokenStream exceptions.
/// NOTE(review): `_ttype = returnToken_.Type; returnToken_.Type = _ttype;` is
/// a generated no-op round-trip (the hook point for token-type filters).
/// </summary>
override public Token nextToken() //throws TokenStreamException
{ Token theRetToken = null; tryAgain: for (;;) { Token _token = null; int _ttype = Token.INVALID_TYPE; setCommitToPath(false); resetText(); try // for char stream error handling
{ try // for lexical error handling
{ switch (LA(1)) { case '*': { mALL(true); theRetToken = returnToken_; break; } case ':': { mCOLON(true); theRetToken = returnToken_; break; } case ';': { mSEMI(true); theRetToken = returnToken_; break; } case ',': { mCOMMA(true); theRetToken = returnToken_; break; } case '|': { mOR(true); theRetToken = returnToken_; break; } case '<': { mINHERITS(true); theRetToken = returnToken_; break; } case '[': { mLBRACK(true); theRetToken = returnToken_; break; } case ']': { mRBRACK(true); theRetToken = returnToken_; break; } case '(': { mLCURLY(true); theRetToken = returnToken_; break; } case ')': { mRCURLY(true); theRetToken = returnToken_; break; } case '.': { mDOT(true); theRetToken = returnToken_; break; } case '/': { mCOMMENT(true); theRetToken = returnToken_; break; } case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': { mINTLIT(true); theRetToken = returnToken_; break; } case '\'': { mCHARLIT(true); theRetToken = returnToken_; break; } case '"': { mSTRING_LITERAL(true); theRetToken = returnToken_; break; } case '\t': case '\n': case '\u000c': case '\r': case ' ': { mWS(true); theRetToken = returnToken_; break; } case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': case '_': { mID(true); 
theRetToken = returnToken_; break; } default: { if (LA(1) == EOF_CHAR) { uponEOF(); returnToken_ = makeToken(Token.EOF_TYPE); } else { consume(); goto tryAgain; } } break; } if (null == returnToken_) { goto tryAgain; // found SKIP token
_ttype = returnToken_.Type; returnToken_.Type = _ttype; return(returnToken_); } catch (RecognitionException e) { if (!getCommitToPath()) { consume(); goto tryAgain; } throw new TokenStreamRecognitionException(e); } } catch (CharStreamException cse) { if (cse is CharStreamIOException) { throw new TokenStreamIOException(((CharStreamIOException)cse).io); } else { throw new TokenStreamException(cse.Message); } } } }
/// <summary>
/// ANTLR-generated lexer rule: matches an identifier — a letter or underscore
/// followed by any run of letters, digits, or underscores. After matching,
/// testLiteralsTable may retype the token to a keyword (IMPORT, ASPECT, ...)
/// when the text is in the literals table.
/// </summary>
public void mID(bool _createToken) //throws RecognitionException, CharStreamException, TokenStreamException
{ int _ttype; Token _token = null; int _begin = text.Length; _ttype = ID; { switch (LA(1)) { case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': { matchRange('a', 'z'); break; } case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': { matchRange('A', 'Z'); break; } case '_': { match('_'); break; } default: { throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn()); } } } { // ( ... )*
for (;;) { switch (LA(1)) { case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': { matchRange('a', 'z'); break; } case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': { matchRange('A', 'Z'); break; } case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': { matchRange('0', '9'); break; } case '_': { match('_'); break; } default: { goto _loop32_breakloop; } } } _loop32_breakloop :; } // ( ... )*
_ttype = testLiteralsTable(_ttype); if (_createToken && (null == _token) && (_ttype != Token.SKIP)) { _token = makeToken(_ttype); _token.setText(text.ToString(_begin, text.Length - _begin)); } returnToken_ = _token; }
// Expected token / not token
/// <summary>
/// Mismatch against a single token type: the parser expected (or, with
/// matchNot, expected anything but) the token type <paramref name="expecting_"/>.
/// </summary>
public MismatchedTokenException(string[] tokenNames_, Token token_, int expecting_, bool matchNot, string fileName_) : base("Mismatched Token", fileName_, token_.getLine(), token_.getColumn()) { tokenNames = tokenNames_; token = token_; tokenText = token_.getText(); mismatchType = matchNot ? TokenTypeEnum.NotTokenType : TokenTypeEnum.TokenType; expecting = expecting_; }
/// <summary>
/// Creates and initializes a new AST node from the given token, or returns
/// null when the token is null. The node's concrete <see cref="System.Type"/>
/// is resolved from the token's type via the TokenTypeID-to-ASTNodeType
/// mapping (falling back to the factory's default node type).
/// </summary>
/// <param name="tok">Token used to choose the node type and initialize the node; may be null.</param>
/// <returns>An initialized AST node, or null for a null token.</returns>
public virtual AST create(Token tok)
{
    if (tok == null)
    {
        return null;
    }
    AST node = createFromNodeType(tok.Type);
    node.initialize(tok);
    return node;
}
// Expected range / not range
/// <summary>
/// Mismatch against an inclusive token-type range [lower, upper_]: the parser
/// expected (or, with matchNot, expected anything outside) that range.
/// </summary>
public MismatchedTokenException(string[] tokenNames_, Token token_, int lower, int upper_, bool matchNot, string fileName_) : base("Mismatched Token", fileName_, token_.getLine(), token_.getColumn()) { tokenNames = tokenNames_; token = token_; tokenText = token_.getText(); mismatchType = matchNot ? TokenTypeEnum.NotRangeType : TokenTypeEnum.RangeType; expecting = lower; upper = upper_; }
/// <summary>
/// Verifies that trailing blank lines plus EOF yield exactly two EOS markers
/// (one for the newline run, one at end of file) and no spurious DEDENTs.
/// </summary>
public void TestTrailingWhiteSpace() { Token[] tokens = new Token[] { new SimpleToken(TEXT, "package"), new SimpleToken(WS, " "), new SimpleToken(TEXT, "Empty"), new SimpleToken(WS, "\n\n\n"), // 1
new Token(Token.EOF_TYPE) // 2
}; AssertTokenSequence(tokens, TEXT, TEXT, EOS, EOS, Token.EOF_TYPE); }
/// <summary>
/// ANTLR-generated rule: parses a full aspect declaration —
/// ASPECT id FOR ( type | '[' target-spec ']' ) include* pointcut* END —
/// and adds the AspectDefinition to conf.Aspects. The bracketed target form
/// supports three strategies: ASSIGNFROM (assignable-from type), CUSTOMMATCHER
/// (custom matcher type), or a namespace identifier with an optional
/// EXCLUDES type list.
/// </summary>
protected void aspects( EngineConfiguration conf ) //throws RecognitionException, TokenStreamException
{ Token a = null; Token aspectId = null; AspectDefinition aspect = null; TargetTypeDefinition target = null; TypeReference tr = null; try { // for error handling
a = LT(1); match(ASPECT); aspectId = LT(1); match(ID); match(FOR); aspect = new AspectDefinition(ToLexicalInfo(a), aspectId.getText()); conf.Aspects.Add(aspect); { switch (LA(1)) { case ID: { tr = type_name_def(); target = new TargetTypeDefinition(tr); target.TargetStrategy = TargetStrategyEnum.SingleType; aspect.TargetType = target; break; } case LBRACK: { match(LBRACK); target = new TargetTypeDefinition( ); aspect.TargetType = target; String namespaceRegEx = null; { switch (LA(1)) { case ASSIGNFROM: { match(ASSIGNFROM); match(LCURLY); tr = type_name_def(); match(RCURLY); target.TargetStrategy = TargetStrategyEnum.Assignable; target.AssignType = tr; break; } case CUSTOMMATCHER: { match(CUSTOMMATCHER); match(LCURLY); tr = type_name_def(); match(RCURLY); target.TargetStrategy = TargetStrategyEnum.Custom; target.CustomMatcherType = tr; break; } case ID: { { namespaceRegEx = identifier(); target.TargetStrategy = TargetStrategyEnum.Namespace; target.NamespaceRoot = namespaceRegEx; { switch (LA(1)) { case EXCLUDES: { match(EXCLUDES); match(LCURLY); type_list(target.Excludes); match(RCURLY); break; } case RBRACK: { break; } default: { throw new NoViableAltException(LT(1), getFilename()); } } } } break; } default: { throw new NoViableAltException(LT(1), getFilename()); } } } match(RBRACK); break; } default: { throw new NoViableAltException(LT(1), getFilename()); } } } { { // ( ... )*
for (;;) { if ((LA(1) == INCLUDE)) { include(aspect); } else { goto _loop33_breakloop; } } _loop33_breakloop :; } // ( ... )*
{ // ( ... )*
for (;;) { if ((LA(1) == POINTCUT)) { pointcut(aspect); } else { goto _loop35_breakloop; } } _loop35_breakloop :; } // ( ... )*
} match(END); } catch (RecognitionException ex) { reportError(ex); consume(); consumeUntil(tokenSet_4_); } }