/// <summary>
/// Parses an ANTLR v4 grammar given as a string and returns the token
/// stream, parser, lexer and parse tree through the out parameters.
/// On a parse error <paramref name="ParseTree"/> is null.
/// </summary>
/// <param name="code">The grammar source text.</param>
/// <param name="TokStream">Receives the token stream built over the input.</param>
/// <param name="Parser">Receives the ANTLRv4 parser instance.</param>
/// <param name="Lexer">Receives the ANTLRv4 lexer instance.</param>
/// <param name="ParseTree">Receives the parse tree, or null on error.</param>
public void Parse(string code, out CommonTokenStream TokStream, out Parser Parser, out Lexer Lexer, out IParseTree ParseTree)
{
    IParseTree pt = null;

    // Set up Antlr to parse the input grammar. The input is already a
    // string, so feed it to AntlrInputStream directly; the previous
    // UTF-8 encode/decode round trip through a MemoryStream and
    // StreamReader just reproduced the same string.
    var ais = new AntlrInputStream(code);
    var lexer = new ANTLRv4Lexer(ais);
    CommonTokenStream cts = new CommonTokenStream(lexer);
    var parser = new ANTLRv4Parser(cts);
    try
    {
        pt = parser.grammarSpec();
    }
    catch (Exception)
    {
        // Parsing error: leave pt null; callers must check ParseTree.
    }
    TokStream = cts;
    Parser = parser;
    Lexer = lexer;
    ParseTree = pt;
}
/// <summary>
/// Entry point: parses the CodeBuff copy of the ANTLRv4 lexer grammar
/// and prints its parse tree in LISP-style text form.
/// </summary>
public static void Main(string[] args)
{
    // Build the lexer over the grammar file, then wrap its tokens in a
    // CodeBuff token stream for the parser.
    var input = new ANTLRFileStream("grammars/org/antlr/codebuff/ANTLRv4Lexer.g4");
    var lexer = new ANTLRv4Lexer(input);
    CommonTokenStream tokens = new CodeBuffTokenStream(lexer);
    var parser = new ANTLRv4Parser(tokens);

    // grammarSpec is the grammar's start rule.
    var tree = parser.grammarSpec();
    Console.WriteLine(tree.ToStringTree(parser));
}
/// <summary>
/// Exercises CodeCompletionCore candidate collection over Expr.g4 at the
/// input start, at token index 3 (";") and at token index 14 (just after
/// the ";" closing the "expression" rule).
/// </summary>
public void Completion_Grammar_Antlr()
{
    // arrange
    // (removed an unused GetCurrentDirectory() local left over from debugging)
    var input = System.IO.File.ReadAllText("../../../Grammar/Expr.g4");
    var inputStream = new AntlrInputStream(input);
    var lexer = new ANTLRv4Lexer(inputStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new ANTLRv4Parser(tokenStream);
    lexer.RemoveErrorListeners();
    parser.RemoveErrorListeners();
    var errorListener = new CountingErrorListener();
    parser.AddErrorListener(errorListener);

    // act
    // assert
    // Specify our entry point
    var tree = parser.grammarSpec();
    Check.That(errorListener.ErrorCount).IsEqualTo(0);

    var core = new CodeCompletionCore(parser, null, null);

    // 1) At the input start.
    var candidates = core.CollectCandidates(0, null);
    Check.That(candidates.Tokens).HasSize(4);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.DOC_COMMENT);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.LEXER);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.PARSER);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.GRAMMAR);
    Check.That(candidates.Tokens[ANTLRv4Lexer.LEXER]).IsEqualTo(new[] { ANTLRv4Lexer.GRAMMAR });
    Check.That(candidates.Tokens[ANTLRv4Lexer.PARSER]).IsEqualTo(new[] { ANTLRv4Lexer.GRAMMAR });
    Check.That(candidates.Tokens[ANTLRv4Lexer.DOC_COMMENT]).HasSize(0);
    Check.That(candidates.Tokens[ANTLRv4Lexer.GRAMMAR]).HasSize(0);
    // Fixed: Check.That(bool) alone never asserts in NFluent; IsTrue()
    // makes the check effective.
    Check.That(candidates.Rules.Count == 0).IsTrue();

    // 2) Go to token index = 3 => ";"
    candidates = core.CollectCandidates(3, null);
    Check.That(candidates.Tokens).HasSize(1);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.SEMI);
    Check.That(candidates.Rules.Count == 0).IsTrue();

    // 3) Go to token index = 14 => just after the ";" of the rule for "expression".
    candidates = core.CollectCandidates(14, null);
    Check.That(candidates.Tokens).HasSize(3);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.CATCH);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.FINALLY);
    Check.That(candidates.Tokens).ContainsKey(ANTLRv4Lexer.RULE_REF);
    // CRASH because -2 is not a token, it is epsilon!
}
/// <summary>
/// Parses the ANTLR v4 grammar file at <paramref name="path"/> and yields
/// one (name, diagram) pair per item produced by the diagram visitor.
/// </summary>
public static IEnumerable<Tuple<string, Diagram>> ParseFile(string path)
{
    var lexer = new ANTLRv4Lexer(CharStreams.fromPath(path));
    var tokens = new CommonTokenStream(lexer);
    var parser = new ANTLRv4Parser(tokens);

    // grammarSpec is the start rule; hand its tree to the diagram visitor.
    var tree = parser.grammarSpec();
    var visitor = new ListDiagramVisitor();
    return visitor.VisitGrammarSpec(tree);
}
/// <summary>
/// Entry point: parses the ANTLR v4 grammar file named by the first
/// command-line argument. The parse result is discarded; this only
/// exercises the parser.
/// </summary>
static void Main(string[] args)
{
    // Read the whole file, disposing the reader (the original leaked it).
    string text;
    using (var reader = new StreamReader(args[0]))
    {
        text = reader.ReadToEnd();
    }

    CommonTokenStream cts = new CommonTokenStream(
        new ANTLRv4Lexer(
            new AntlrInputStream(text)));
    var ant_parser = new ANTLRv4Parser(cts);
    ant_parser.grammarSpec();
}
/// <summary>
/// Parses the grammar file <paramref name="ffn"/>, reports whether the
/// parse succeeded, then dumps the token stream and the parse tree.
/// </summary>
static void Try(string ffn)
{
    var stream = new AntlrFileStream(ffn);
    var lexer = new ANTLRv4Lexer(stream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new ANTLRv4Parser(tokens);

    // Collect syntax errors so success/failure can be reported below.
    var listener = new ErrorListener<IToken>(parser, lexer, tokens);
    parser.AddErrorListener(listener);
    var tree = parser.grammarSpec();

    if (listener.had_error)
    {
        System.Console.WriteLine("error in parse.");
    }
    else
    {
        System.Console.WriteLine("parse completed.");
    }

    // Dump diagnostics regardless of the outcome.
    System.Console.WriteLine(tokens.OutputTokens());
    System.Console.WriteLine(tree.OutputTree(tokens));
}
/// <summary>
/// Parses the grammar source held in <paramref name="pd"/> and stores the
/// resulting token stream, parser, lexer and parse tree back on it.
/// On a parse error pd.ParseTree is left null.
/// </summary>
/// <param name="pd">Carries the input (FullFileName, Code) and receives the results.</param>
public void Parse(ParserDetails pd)
{
    string ffn = pd.FullFileName;
    string code = pd.Code;
    IParseTree pt = null;

    // Set up Antlr to parse the input grammar. The code is already a
    // string, so pass it straight to AntlrInputStream; the previous
    // UTF-8 encode/decode round trip through a MemoryStream and
    // StreamReader just reproduced the same string.
    AntlrInputStream ais = new AntlrInputStream(code)
    {
        name = ffn // lets Antlr error messages identify the source file
    };
    ANTLRv4Lexer lexer = new ANTLRv4Lexer(ais);
    CommonTokenStream cts = new CommonTokenStream(lexer);
    ANTLRv4Parser parser = new ANTLRv4Parser(cts);
    try
    {
        pt = parser.grammarSpec();
    }
    catch (Exception)
    {
        // Parsing error: leave pt null; consumers must check pd.ParseTree.
    }
    // (removed dead commented-out tree-serialization debug code)
    pd.TokStream = cts;
    pd.Parser = parser;
    pd.Lexer = lexer;
    pd.ParseTree = pt;
}
/// <summary>
/// Entry point: parses the grammar file named by the first command-line
/// argument and, on success, dumps the token stream and parse tree.
/// </summary>
static void Main(string[] args)
{
    var file_name = args[0];

    // Read the whole file, disposing the reader (the original leaked the
    // StreamReader returned by File.OpenText).
    string text;
    using (var input = File.OpenText(file_name))
    {
        text = input.ReadToEnd();
    }

    var str = new AntlrInputStream(text);
    var lexer = new ANTLRv4Lexer(str);
    var tokens = new CommonTokenStream(lexer);
    var parser = new ANTLRv4Parser(tokens);
    var listener = new ErrorListener<IToken>(parser, lexer, tokens);
    parser.AddErrorListener(listener);
    var tree = parser.grammarSpec();
    if (listener.had_error)
    {
        System.Console.WriteLine("error in parse.");
    }
    else
    {
        System.Console.WriteLine("parse completed.");
        System.Console.WriteLine(tokens.OutputTokens());
        System.Console.WriteLine(tree.OutputTree(tokens));
    }
}
// Lexes and parses a single grammar file, collects grammar structure
// (rules, grammar type, superclass, action insertion points) into _result,
// then scans the file's comment tokens for embedded option directives
// (runtime, package, visitor, listener, root, prediction mode, case
// insensitivity) and records whichever ones match.
// Non-.g4 files are only registered as code sources and skipped.
private void ProcessGrammarFile(Grammar grammar, string grammarFileName, AntlrErrorListener antlrErrorListener, CancellationToken cancellationToken)
{
    string code = File.ReadAllText(Path.Combine(grammar.Directory, grammarFileName));
    var inputStream = new AntlrInputStream(code);
    var codeSource = new CodeSource(grammarFileName, inputStream.ToString());
    _result.GrammarFilesData.Add(grammarFileName, codeSource);

    // Only ANTLR grammar files are parsed; anything else stops here.
    string extension = Path.GetExtension(grammarFileName);
    if (extension != Grammar.AntlrDotExt)
    {
        return;
    }

    antlrErrorListener.CodeSource = codeSource;
    var antlr4Lexer = new ANTLRv4Lexer(inputStream);
    antlr4Lexer.RemoveErrorListeners();
    antlr4Lexer.AddErrorListener(antlrErrorListener);
    // Materialize all tokens up front: the same list is reused below for
    // the comment-directive scan after parsing.
    var tokens = antlr4Lexer.GetAllTokens();
    var codeTokenSource = new ListTokenSource(tokens);
    cancellationToken.ThrowIfCancellationRequested();
    var codeTokenStream = new CommonTokenStream(codeTokenSource);
    var antlr4Parser = new ANTLRv4Parser(codeTokenStream);
    antlr4Parser.RemoveErrorListeners();
    antlr4Parser.AddErrorListener(antlrErrorListener);
    var tree = antlr4Parser.grammarSpec();

    var grammarInfoCollectorListener = new GrammarInfoCollectorListener();
    grammarInfoCollectorListener.CollectInfo(antlrErrorListener.CodeSource, tree);

    // NOTE(review): shortFileName is computed but never used in this method.
    var shortFileName = Path.GetFileNameWithoutExtension(grammarFileName);
    _result.GrammarActionsTextSpan[grammarFileName] = grammarInfoCollectorListener.CodeInsertions;
    var grammarType = grammarInfoCollectorListener.GrammarType;

    // A combined grammar contributes to both the lexer and parser sides.
    if (grammarType == GrammarType.Lexer || grammarType == GrammarType.Combined)
    {
        _result.LexerSuperClass = grammarInfoCollectorListener.SuperClass;
    }

    if (grammarType == GrammarType.Separated || grammarType == GrammarType.Combined)
    {
        _result.ParserSuperClass = grammarInfoCollectorListener.SuperClass;
        _result.Rules = grammarInfoCollectorListener.Rules;
    }

    // Shared error sink for all the option matchers below: raise the
    // event and keep the error on the result.
    void ErrorAction(ParsingError parsingError)
    {
        ErrorEvent?.Invoke(this, parsingError);
        _result.Errors.Add(parsingError);
    }

    var caseInsensitiveTypeOptionMatcher = new CaseInsensitiveTypeOptionMatcher(codeSource, grammarType, ErrorAction);
    var runtimeOptionMatcher = new RuntimeOptionMatcher(codeSource, grammarType, ErrorAction);
    var visitorOptionMatcher = new VisitorOptionMatcher(codeSource, grammarType, ErrorAction);
    var listenerOptionMatcher = new ListenerOptionMatcher(codeSource, grammarType, ErrorAction);
    var packageOptionMatcher = new PackageOptionMatcher(codeSource, grammarType, ErrorAction);
    var rootOptionMatcher = new RootOptionMatcher(codeSource, grammarType, ErrorAction, _result.Rules);
    var predictionOptionMatcher = new PredictionModeOptionMatcher(codeSource, grammarType, ErrorAction);

    // Option directives live inside comments; try each matcher in turn
    // and take the first that recognizes the comment's content.
    foreach (IToken token in tokens)
    {
        if (token.Type == ANTLRv4Lexer.LINE_COMMENT || token.Type == ANTLRv4Lexer.BLOCK_COMMENT)
        {
            if (caseInsensitiveTypeOptionMatcher.Match(token, out var caseInsensitiveType))
            {
                _result.CaseInsensitiveType = caseInsensitiveType;
                continue;
            }

            if (runtimeOptionMatcher.Match(token, out Runtime runtime))
            {
                _result.Runtime = runtime;
                continue;
            }

            if (packageOptionMatcher.Match(token, out string package))
            {
                _result.Package = package;
                continue;
            }

            if (visitorOptionMatcher.Match(token, out bool generateVisitor))
            {
                _result.Visitor = generateVisitor;
                continue;
            }

            if (listenerOptionMatcher.Match(token, out bool generateListener))
            {
                _result.Listener = generateListener;
                continue;
            }

            if (rootOptionMatcher.Match(token, out string root))
            {
                _result.Root = root;
                continue;
            }

            if (predictionOptionMatcher.Match(token, out PredictionMode predictionMode))
            {
                _result.PredictionMode = predictionMode;
                continue;
            }
        }
    }
}
// Exercises CodeCompletionCore on two in-memory versions of an "Expr"
// grammar: first a complete grammar (parser + lexer rules), then a
// truncated one. For each, completion candidates are collected at the
// grammar start, at the first "assignment" reference, and at the first
// ":". Several results are only observed, not asserted (t1, and the
// final sub-block) — NOTE(review): these look like unfinished checks.
public void Test1()
{
    {
        // --- Complete grammar. The verbatim string is single-line, so
        // whitespace only separates tokens and the token indices asserted
        // below (6 and 8) are stable.
        var input = @"grammar Expr; expression: assignment | simpleExpression; assignment : (VAR | LET) ID EQUAL simpleExpression ; simpleExpression : simpleExpression (PLUS | MINUS) simpleExpression | simpleExpression (MULTIPLY | DIVIDE) simpleExpression | variableRef | functionRef ; variableRef : ID ; functionRef : ID OPEN_PAR CLOSE_PAR ; VAR: [vV] [aA] [rR]; LET: [lL] [eE] [tT]; PLUS: '+'; MINUS: '-'; MULTIPLY: '*'; DIVIDE: '/'; EQUAL: '='; OPEN_PAR: '('; CLOSE_PAR: ')'; ID: [a-zA-Z] [a-zA-Z0-9_]*; WS: [ \n\r\t] -> channel(HIDDEN); ";
        var inputStream = new AntlrInputStream(input);
        var lexer = new ANTLRv4Lexer(inputStream);
        var tokenStream = new CommonTokenStream(lexer);
        var parser = new ANTLRv4Parser(tokenStream);
        lexer.RemoveErrorListeners();
        parser.RemoveErrorListeners();
        var errorListener = new CountingErrorListener();
        parser.AddErrorListener(errorListener);
        var tree = parser.grammarSpec();
        Assert.True(errorListener.ErrorCount == 0);
        var core = new CodeCompletionCore(parser, null, null);
        {
            // At the very start of the input only grammar-header tokens
            // are candidates.
            var candidates = core.CollectCandidates(0, null);
            // NOTE(review): t1 is computed but never asserted.
            var t1 = candidates.Tokens.Count == 4;
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.DOC_COMMENT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.GRAMMAR));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.PARSER));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LEXER));
        }
        {
            // Position on the first "assignment" token (an alternative
            // inside the "expression" rule).
            int index = 0;
            foreach (var t in tokenStream.GetTokens())
            {
                if (t.Text == "assignment") // Stop on first "assignment"
                {
                    index = t.TokenIndex;
                    break;
                }
            }
            var candidates = core.CollectCandidates(index, null);
            Assert.True(index == 8);
            Assert.True(candidates.Tokens.Count == 9);
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.TOKEN_REF));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.RULE_REF));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.STRING_LITERAL));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.BEGIN_ACTION));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LPAREN));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.DOT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.NOT));
            // -2 is CodeCompletionCore's epsilon pseudo-token.
            Assert.True(candidates.Tokens.ContainsKey(-2));
        }
        {
            // Position on the first ":" (start of a rule body): the rule
            // "prequel" constructs are the expected candidates.
            int index = 0;
            foreach (var t in tokenStream.GetTokens())
            {
                if (t.Text == ":") // Stop on first ":"
                {
                    index = t.TokenIndex;
                    break;
                }
            }
            var candidates = core.CollectCandidates(index, null);
            Assert.True(index == 6);
            Assert.True(candidates.Tokens.Count == 7);
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.BEGIN_ARGUMENT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.OPTIONS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.RETURNS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LOCALS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.THROWS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.COLON));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.AT));
        }
    }
    {
        // --- Truncated grammar (same header, only two parser rules).
        // The same positions should produce the same candidate sets as
        // the complete grammar above.
        var input = @"grammar Expr; expression: assignment | simpleExpression; assignment : (VAR | LET) ID EQUAL simpleExpression ; ";
        var inputStream = new AntlrInputStream(input);
        var lexer = new ANTLRv4Lexer(inputStream);
        var tokenStream = new CommonTokenStream(lexer);
        var parser = new ANTLRv4Parser(tokenStream);
        lexer.RemoveErrorListeners();
        parser.RemoveErrorListeners();
        var errorListener = new CountingErrorListener();
        parser.AddErrorListener(errorListener);
        var tree = parser.grammarSpec();
        Assert.True(errorListener.ErrorCount == 0);
        var core = new CodeCompletionCore(parser, null, null);
        {
            // At the input start, as before.
            var candidates = core.CollectCandidates(0, null);
            // NOTE(review): t1 is computed but never asserted.
            var t1 = candidates.Tokens.Count == 4;
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.DOC_COMMENT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.GRAMMAR));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.PARSER));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LEXER));
        }
        {
            // First "assignment" token, as before.
            int index = 0;
            foreach (var t in tokenStream.GetTokens())
            {
                if (t.Text == "assignment") // Stop on first "assignment"
                {
                    index = t.TokenIndex;
                    break;
                }
            }
            var candidates = core.CollectCandidates(index, null);
            Assert.True(index == 8);
            Assert.True(candidates.Tokens.Count == 9);
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.TOKEN_REF));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.RULE_REF));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.STRING_LITERAL));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.BEGIN_ACTION));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LPAREN));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.DOT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.NOT));
            // -2 is CodeCompletionCore's epsilon pseudo-token.
            Assert.True(candidates.Tokens.ContainsKey(-2));
        }
        {
            // First ":", as before.
            int index = 0;
            foreach (var t in tokenStream.GetTokens())
            {
                if (t.Text == ":") // Stop on first ":"
                {
                    index = t.TokenIndex;
                    break;
                }
            }
            var candidates = core.CollectCandidates(index, null);
            Assert.True(index == 6);
            Assert.True(candidates.Tokens.Count == 7);
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.BEGIN_ARGUMENT));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.OPTIONS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.RETURNS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.LOCALS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.THROWS));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.COLON));
            Assert.True(candidates.Tokens.ContainsKey(ANTLRv4Parser.AT));
        }
        {
            // Position just after the third ";" — the end of the last
            // rule present in this truncated input.
            int index = 0;
            int times = 0;
            foreach (var t in tokenStream.GetTokens())
            {
                if (t.Text == ";") // Stop on ";"
                {
                    if (++times == 3)
                    {
                        index = t.TokenIndex + 1;
                        break;
                    }
                }
            }
            var candidates = core.CollectCandidates(index, null);
            // candidates includes CATCH, FINALLY, -2. Why? Why not include
            // DOC_COMMENT, RULE_REF, ...?
        }
    }
}
// Parses a grammar given as plain text and caches derived information in
// the _ant_* fields: the parse tree, all comment tokens, nonterminal and
// terminal names, their defining/applied occurrences, keyword tokens, and
// literal tokens. On a parse error the tree may be null and later steps
// will fail — NOTE(review): the catch swallows the exception silently.
public void Parse(string plain_old_input_grammar, string ffn)
{
    Text = plain_old_input_grammar;
    full_file_name = ffn;

    // Set up Antlr to parse input grammar.
    // NOTE(review): the UTF-8 encode/decode round trip below just
    // reproduces the input string; AntlrInputStream(plain_old_input_grammar)
    // would be equivalent and cheaper.
    byte[] byteArray = Encoding.UTF8.GetBytes(plain_old_input_grammar);
    CommonTokenStream cts = new CommonTokenStream(
        new ANTLRv4Lexer(
            new AntlrInputStream(
                new StreamReader(
                    new MemoryStream(byteArray)).ReadToEnd())));
    _ant_parser = new ANTLRv4Parser(cts);

    // Set up another token stream containing comments. This might be
    // problematic as the parser influences the lexer.
    CommonTokenStream cts_off_channel = new CommonTokenStream(
        new ANTLRv4Lexer(
            new AntlrInputStream(
                new StreamReader(
                    new MemoryStream(byteArray)).ReadToEnd())),
        ANTLRv4Lexer.OFF_CHANNEL);

    // Get all comments by walking the off-channel stream to EOF.
    _ant_comments = new List<IToken>();
    while (cts_off_channel.LA(1) != ANTLRv4Parser.Eof)
    {
        IToken token = cts_off_channel.LT(1);
        if (token.Type == ANTLRv4Parser.BLOCK_COMMENT
            || token.Type == ANTLRv4Parser.LINE_COMMENT)
        {
            _ant_comments.Add(token);
        }
        cts_off_channel.Consume();
    }

    try
    {
        _ant_tree = _ant_parser.grammarSpec();
    }
    catch (Exception e)
    {
        // Parsing error. NOTE(review): 'e' is unused and the failure is
        // silently ignored; _ant_tree stays null in that case.
    }

    // Flatten the tree to an array of all nodes for the scans below.
    _all_nodes = DFSVisitor.DFS(_ant_tree as ParserRuleContext).ToArray();

    {
        // Get all nonterminal names from the grammar (RULE_REF tokens).
        IEnumerable<IParseTree> nonterm_nodes_iterator = _all_nodes.Where((IParseTree n) =>
        {
            TerminalNodeImpl nonterm = n as TerminalNodeImpl;
            return (nonterm?.Symbol.Type == ANTLRv4Parser.RULE_REF);
        });
        _ant_nonterminals_names = nonterm_nodes_iterator.Select<IParseTree, string>(
            (t) => (t as TerminalNodeImpl).Symbol.Text).ToArray();
    }

    {
        // Get all terminal names from the grammar (TOKEN_REF tokens).
        IEnumerable<IParseTree> term_nodes_iterator = _all_nodes.Where((IParseTree n) =>
        {
            TerminalNodeImpl nonterm = n as TerminalNodeImpl;
            return (nonterm?.Symbol.Type == ANTLRv4Parser.TOKEN_REF);
        });
        _ant_terminals_names = term_nodes_iterator.Select<IParseTree, string>(
            (t) => (t as TerminalNodeImpl).Symbol.Text).ToArray();
    }

    {
        // Get all defining and applied occurences of nonterminal names in grammar.
        IEnumerable<IParseTree> nonterm_nodes_iterator = _all_nodes.Where((IParseTree n) =>
        {
            TerminalNodeImpl nonterm = n as TerminalNodeImpl;
            if (nonterm == null)
            {
                return (false);
            }
            if (!_ant_nonterminals_names.Contains(nonterm.GetText()))
            {
                return (false);
            }
            // The token must be part of parserRuleSpec context.
            for (var p = nonterm.Parent; p != null; p = p.Parent)
            {
                if (p is ANTLRv4Parser.ParserRuleSpecContext)
                {
                    return (true);
                }
            }
            return (false);
        });
        _ant_nonterminals = nonterm_nodes_iterator.Select<IParseTree, IToken>(
            (t) => (t as TerminalNodeImpl).Symbol).ToArray();

        // Defining occurrences: the name immediately followed by ":" in
        // its parent (i.e. the left-hand side of a rule).
        var iterator = nonterm_nodes_iterator.Where((IParseTree n) =>
        {
            TerminalNodeImpl term = n as TerminalNodeImpl;
            if (term == null)
            {
                return (false);
            }
            IRuleNode parent = term.Parent;
            for (int i = 0; i < parent.ChildCount; ++i)
            {
                if (parent.GetChild(i) == term
                    && i + 1 < parent.ChildCount
                    && parent.GetChild(i + 1).GetText() == ":")
                {
                    return (true);
                }
            }
            return (false);
        });
        _ant_nonterminals_defining = iterator.Select<IParseTree, IToken>(
            (t) => (t as TerminalNodeImpl).Symbol).ToArray();
    }

    {
        // Get all defining and applied occurences of terminal names in grammar.
        IEnumerable<IParseTree> term_nodes_iterator = _all_nodes.Where((IParseTree n) =>
        {
            TerminalNodeImpl term = n as TerminalNodeImpl;
            if (term == null)
            {
                return (false);
            }
            if (!_ant_terminals_names.Contains(term.GetText()))
            {
                return (false);
            }
            // The token must be part of a parser or lexer rule context.
            for (var p = term.Parent; p != null; p = p.Parent)
            {
                if (p is ANTLRv4Parser.ParserRuleSpecContext
                    || p is ANTLRv4Parser.LexerRuleSpecContext)
                {
                    return (true);
                }
            }
            return (false);
        });
        _ant_terminals = term_nodes_iterator.Select<IParseTree, IToken>(
            (t) => (t as TerminalNodeImpl).Symbol).ToArray();

        // Get all defining terminal names in grammar (name followed by ":").
        var iterator = term_nodes_iterator.Where((IParseTree n) =>
        {
            TerminalNodeImpl term = n as TerminalNodeImpl;
            if (term == null)
            {
                return (false);
            }
            IRuleNode parent = term.Parent;
            for (int i = 0; i < parent.ChildCount; ++i)
            {
                if (parent.GetChild(i) == term
                    && i + 1 < parent.ChildCount
                    && parent.GetChild(i + 1).GetText() == ":")
                {
                    return (true);
                }
            }
            return (false);
        });
        _ant_terminals_defining = iterator.Select<IParseTree, IToken>(
            (t) => (t as TerminalNodeImpl).Symbol).ToArray();
    }

    {
        // Get all keyword tokens in grammar. The walk up the parents
        // classifies contexts: some mark keywords (grammar type, options
        // header, import/tokens/channels headers, mode specs), while the
        // nested "false" cases exclude the identifiers inside them.
        IEnumerable<IParseTree> keywords_interator = _all_nodes.Where((IParseTree n) =>
        {
            TerminalNodeImpl nonterm = n as TerminalNodeImpl;
            if (nonterm == null)
            {
                return (false);
            }
            for (var p = nonterm.Parent; p != null; p = p.Parent)
            {
                // "parser grammar" "lexer grammar" etc.
                if (p is ANTLRv4Parser.GrammarTypeContext)
                {
                    return (true);
                }
                if (p is ANTLRv4Parser.OptionsSpecContext)
                {
                    return (true);
                }
                // "options ..."
                if (p is ANTLRv4Parser.OptionContext)
                {
                    return (false);
                }
                // "import ..."
                if (p is ANTLRv4Parser.DelegateGrammarsContext)
                {
                    return (true);
                }
                if (p is ANTLRv4Parser.DelegateGrammarContext)
                {
                    return (false);
                }
                // "tokens ..."
                if (p is ANTLRv4Parser.TokensSpecContext)
                {
                    return (true);
                }
                if (p is ANTLRv4Parser.IdListContext)
                {
                    return (false);
                }
                // "channels ..."
                if (p is ANTLRv4Parser.ChannelsSpecContext)
                {
                    return (true);
                }
                if (p is ANTLRv4Parser.ModeSpecContext)
                {
                    return (true);
                }
            }
            return (false);
        });
        _ant_keywords = keywords_interator.Select<IParseTree, IToken>(
            (t) => (t as TerminalNodeImpl).Symbol).ToArray();
    }

    {
        // Get all literal tokens (strings, ints, char sets) appearing
        // inside parser or lexer rules.
        IEnumerable<IParseTree> lit_nodes_iterator = _all_nodes.Where((IParseTree n) =>
        {
            TerminalNodeImpl term = n as TerminalNodeImpl;
            if (term == null)
            {
                return (false);
            }
            // Chicken/egg problem. Assume that literals are marked
            // with the appropriate token type.
            if (term.Symbol == null)
            {
                return (false);
            }
            if (!(term.Symbol.Type == ANTLRv4Parser.STRING_LITERAL
                  || term.Symbol.Type == ANTLRv4Parser.INT
                  || term.Symbol.Type == ANTLRv4Parser.LEXER_CHAR_SET))
            {
                return (false);
            }
            // The token must be part of a parser or lexer rule context.
            for (var p = term.Parent; p != null; p = p.Parent)
            {
                if (p is ANTLRv4Parser.ParserRuleSpecContext
                    || p is ANTLRv4Parser.LexerRuleSpecContext)
                {
                    return (true);
                }
            }
            return (false);
        });
        _ant_literals = lit_nodes_iterator.Select<IParseTree, IToken>(
            (t) => (t as TerminalNodeImpl).Symbol).ToArray();
    }
    //pp.ErrorHandler = new MyErrorStrategy();
}
// Checks every file of the grammar held by inputState: parses each .g4
// file, collects rules, superclasses and action text spans, and gathers
// all parse errors into the returned GrammarCheckedState. Exceptions are
// captured on the result instead of propagating; cancellation is recorded
// but not reported as a parsing error.
public GrammarCheckedState Check(InputState inputState, CancellationToken cancellationToken = default)
{
    var grammar = inputState.Grammar;
    var result = new GrammarCheckedState(inputState);
    try
    {
        var antlrErrorListener = new AntlrErrorListener();
        antlrErrorListener.ErrorEvent += ErrorEvent;
        // Mirror every reported error into result.Errors. The lock guards
        // the list — NOTE(review): presumably events can fire concurrently;
        // confirm against AntlrErrorListener.
        antlrErrorListener.ErrorEvent += (sender, error) =>
        {
            lock (result.Errors)
            {
                result.Errors.Add(error);
            }
        };

        foreach (string grammarFileName in grammar.Files)
        {
            string code = File.ReadAllText(Path.Combine(grammar.Directory, grammarFileName));
            var inputStream = new AntlrInputStream(code);
            var codeSource = new CodeSource(grammarFileName, inputStream.ToString());
            result.GrammarFilesData.Add(grammarFileName, codeSource);

            // Non-.g4 files are registered above but not parsed.
            string extension = Path.GetExtension(grammarFileName);
            if (extension != Grammar.AntlrDotExt)
            {
                continue;
            }

            antlrErrorListener.CodeSource = codeSource;
            var antlr4Lexer = new ANTLRv4Lexer(inputStream);
            antlr4Lexer.RemoveErrorListeners();
            antlr4Lexer.AddErrorListener(antlrErrorListener);
            var codeTokenSource = new ListTokenSource(antlr4Lexer.GetAllTokens());

            cancellationToken.ThrowIfCancellationRequested();

            var codeTokenStream = new CommonTokenStream(codeTokenSource);
            var antlr4Parser = new ANTLRv4Parser(codeTokenStream);
            antlr4Parser.RemoveErrorListeners();
            antlr4Parser.AddErrorListener(antlrErrorListener);
            var tree = antlr4Parser.grammarSpec();

            var grammarInfoCollectorListener = new GrammarInfoCollectorListener();
            grammarInfoCollectorListener.CollectInfo(antlrErrorListener.CodeSource, tree);

            var shortFileName = Path.GetFileNameWithoutExtension(grammarFileName);
            result.GrammarActionsTextSpan[grammarFileName] = grammarInfoCollectorListener.CodeInsertions;

            // File-name postfix conventions decide whether the collected
            // superclass belongs to the lexer or the parser side.
            if (grammarFileName.Contains(Grammar.LexerPostfix))
            {
                result.LexerSuperClass = grammarInfoCollectorListener.SuperClass;
            }

            if (grammarFileName.Contains(Grammar.ParserPostfix))
            {
                result.ParserSuperClass = grammarInfoCollectorListener.SuperClass;
            }

            // Rules come from any non-lexer grammar file.
            if (!shortFileName.Contains(Grammar.LexerPostfix))
            {
                result.Rules = grammarInfoCollectorListener.Rules;
                cancellationToken.ThrowIfCancellationRequested();
            }
        }
    }
    catch (Exception ex)
    {
        // Keep the failure on the result; only non-cancellation errors
        // are surfaced through the ErrorEvent.
        result.Exception = ex;
        if (!(ex is OperationCanceledException))
        {
            ErrorEvent?.Invoke(this, new ParsingError(ex, WorkflowStage.GrammarChecked));
        }
    }
    return (result);
}