/// <summary>
/// Parses a Boo source module from <paramref name="reader"/>.
/// </summary>
/// <param name="readerName">Logical name of the input; used for lexer/parser error
/// reporting and to derive the resulting module's name.</param>
/// <param name="reader">The source text to parse.</param>
/// <param name="errorHandler">Handler attached to the parser's <c>Error</c> event.</param>
/// <returns>The parsed <c>Module</c> AST, named via <c>CreateModuleName(readerName)</c>.</returns>
public static Module ParseModule(string readerName, TextReader reader, ParserErrorHandler errorHandler)
{
    var selector = new antlr.TokenStreamSelector();

    var lexer = new BooLexer(reader);
    lexer.setTabSize(TabSize);
    lexer.setFilename(readerName);
    lexer.setTokenObjectClass(TokenObjectClass);
    // NOTE(review): TabSize and TokenObjectClass are passed to Initialize as well as set
    // directly above — presumably an overload that re-applies them; confirm whether the
    // explicit setters are redundant before removing either side.
    lexer.Initialize(selector, TabSize, TokenObjectClass);

    // Convert leading whitespace into the INDENT/DEDENT/EOS virtual tokens the parser expects.
    var filter = new IndentTokenStreamFilter(lexer, WS, INDENT, DEDENT, EOS);
    selector.select(filter);

    var parser = new BooParser(selector);
    parser.setFilename(readerName);
    parser.Error += errorHandler;

    var module = parser.start();
    module.Name = CreateModuleName(readerName);
    return module;
}
/// <summary>
/// Creates a token stream over <paramref name="reader"/> using the WSA Boo lexer
/// (no indentation filter is applied; the lexer feeds the selector directly).
/// </summary>
/// <param name="tabSize">Tab width forwarded to the lexer via <c>Initialize</c>.</param>
/// <param name="readerName">Name reported by the lexer in diagnostics.</param>
/// <param name="reader">Source text to tokenize.</param>
/// <returns>A <c>TokenStreamSelector</c> with the lexer selected as its active stream.</returns>
public static antlr.TokenStream CreateBooLexer(int tabSize, string readerName, TextReader reader)
{
    antlr.TokenStreamSelector streamSelector = new antlr.TokenStreamSelector();

    WSABooLexer wsaLexer = new WSABooLexer(reader);
    wsaLexer.setFilename(readerName);
    wsaLexer.Initialize(streamSelector, tabSize, BooToken.TokenCreator);

    streamSelector.select(wsaLexer);
    return streamSelector;
}
/// <summary>
/// Creates a token stream over <paramref name="reader"/> using the WSA Boo lexer
/// with the fully-qualified <c>Boo.Lang.Parser.BooToken.Creator</c> token factory.
/// </summary>
/// <param name="tabSize">Tab width forwarded to the lexer via <c>Initialize</c>.</param>
/// <param name="readerName">Name reported by the lexer in diagnostics.</param>
/// <param name="reader">Source text to tokenize.</param>
/// <returns>A <c>TokenStreamSelector</c> with the lexer selected as its active stream.</returns>
public static antlr.TokenStream CreateBooLexer(int tabSize, string readerName, TextReader reader)
{
    var selector = new antlr.TokenStreamSelector();

    var lexer = new WSABooLexer(reader);
    lexer.setFilename(readerName);
    lexer.Initialize(selector, tabSize, Boo.Lang.Parser.BooToken.Creator);

    selector.select(lexer);
    return selector;
}
/// <summary>
/// Creates a token stream over <paramref name="reader"/> with the standard Boo lexer,
/// wrapped in an <c>IndentTokenStreamFilter</c> that emits INDENT/DEDENT/EOL
/// virtual tokens from whitespace.
/// </summary>
/// <param name="tabSize">Tab width forwarded to the lexer via <c>Initialize</c>.</param>
/// <param name="readerName">Name reported by the lexer in diagnostics.</param>
/// <param name="reader">Source text to tokenize.</param>
/// <returns>A <c>TokenStreamSelector</c> with the indent filter selected as its active stream.</returns>
public static antlr.TokenStream CreateBooLexer(int tabSize, string readerName, TextReader reader)
{
    antlr.TokenStreamSelector tokenSelector = new antlr.TokenStreamSelector();

    BooLexer booLexer = new BooLexer(reader);
    booLexer.setFilename(readerName);
    booLexer.Initialize(tokenSelector, tabSize, BooToken.TokenCreator);

    IndentTokenStreamFilter indentFilter =
        new IndentTokenStreamFilter(booLexer, WS, INDENT, DEDENT, EOL);
    tokenSelector.select(indentFilter);

    return tokenSelector;
}
/// <summary>
/// Creates a token stream over <paramref name="reader"/> with the standard Boo lexer,
/// wrapped in a <c>WSATokenStreamFilter</c> instead of the indentation filter.
/// </summary>
/// <param name="tabSize">Tab width forwarded to the lexer via <c>Initialize</c>.</param>
/// <param name="readerName">Name reported by the lexer in diagnostics.</param>
/// <param name="reader">Source text to tokenize.</param>
/// <returns>A <c>TokenStreamSelector</c> with the WSA filter selected as its active stream.</returns>
public static antlr.TokenStream CreateBooLexer(int tabSize, string readerName, TextReader reader)
{
    antlr.TokenStreamSelector tokenSelector = new antlr.TokenStreamSelector();

    BooLexer booLexer = new BooLexer(reader);
    booLexer.setFilename(readerName);
    booLexer.Initialize(tokenSelector, tabSize, BooToken.TokenCreator);

    tokenSelector.select(new WSATokenStreamFilter(booLexer));
    return tokenSelector;
}
/// <summary>
/// Creates a token stream over <paramref name="reader"/> with the standard Boo lexer,
/// wrapped in an <c>IndentTokenStreamFilter</c> that emits INDENT/DEDENT/EOL
/// virtual tokens from whitespace.
/// </summary>
/// <param name="tabSize">Tab width forwarded to the lexer via <c>Initialize</c>.</param>
/// <param name="readerName">Name reported by the lexer in diagnostics.</param>
/// <param name="reader">Source text to tokenize.</param>
/// <returns>A <c>TokenStreamSelector</c> with the indent filter selected as its active stream.</returns>
public static antlr.TokenStream CreateBooLexer(int tabSize, string readerName, TextReader reader)
{
    var selector = new antlr.TokenStreamSelector();

    var lexer = new BooLexer(reader);
    lexer.setFilename(readerName);
    lexer.Initialize(selector, tabSize, BooToken.TokenCreator);

    var filter = new IndentTokenStreamFilter(lexer, WS, INDENT, DEDENT, EOL);
    selector.select(filter);

    return selector;
}
/// <summary>
/// Configures this lexer: applies the tab size and token factory, then records the
/// shared selector and creates the token-stream recorder bound to it.
/// </summary>
/// <param name="selector">Selector shared across the cooperating token streams.</param>
/// <param name="tabSize">Tab width passed to <c>setTabSize</c>.</param>
/// <param name="tokenCreator">Factory passed to <c>setTokenCreator</c>.</param>
internal void Initialize(antlr.TokenStreamSelector selector, int tabSize, antlr.TokenCreator tokenCreator)
{
    setTabSize(tabSize);
    setTokenCreator(tokenCreator);

    _selector = selector;
    _erecorder = new TokenStreamRecorder(selector);
}
/// <summary>
/// Creates a lexer for Boo expressions sharing this lexer's input state, tab size,
/// and token factory, wrapped in an <c>EndTokenStreamFilter</c> (END-to-ID filtering).
/// </summary>
/// <returns>A fresh <c>TokenStreamSelector</c> with the filtered expression lexer selected.</returns>
internal antlr.TokenStream CreateExpressionLexer()
{
    var expressionLexer = new BooExpressionLexer(getInputState());
    expressionLexer.setTabSize(getTabSize());
    expressionLexer.setTokenCreator(tokenCreator);

    // Route the expression lexer through the END-to-ID filter behind its own selector.
    var expressionSelector = new antlr.TokenStreamSelector();
    expressionSelector.select(new EndTokenStreamFilter(expressionLexer, END, ID));
    return expressionSelector;
}