public override Dictionary<IToken, int> ExtractComments(string code)
{
    if (code == null)
    {
        return null;
    }

    // Lex the input and read tokens from the off channel, where the
    // lexer emits comments and other hidden tokens.
    byte[] byteArray = Encoding.UTF8.GetBytes(code);
    var ais = new AntlrInputStream(
        new StreamReader(
            new MemoryStream(byteArray)).ReadToEnd());
    var lexer = new W3CebnfLexer(ais);
    CommonTokenStream cts_off_channel = new CommonTokenStream(lexer, W3CebnfLexer.OFF_CHANNEL);
    lexer.RemoveErrorListeners();
    var lexer_error_listener = new ErrorListener<int>(null, lexer, this.QuietAfter);
    lexer.AddErrorListener(lexer_error_listener);

    // Collect every COMMENT token, tagging it with the comment classification.
    Dictionary<IToken, int> new_list = new Dictionary<IToken, int>();
    int type = (int)AntlrClassifications.ClassificationComment;
    while (cts_off_channel.LA(1) != W3CebnfParser.Eof)
    {
        IToken token = cts_off_channel.LT(1);
        if (token.Type == W3CebnfParser.COMMENT)
        {
            new_list[token] = type;
        }
        cts_off_channel.Consume();
    }
    return new_list;
}
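// Usage sketch for ExtractComments (illustrative only; the variable names
// below are assumptions, not part of the original source):
//
//     var comments = grammarDescription.ExtractComments(sourceText);
//     // Each entry maps a COMMENT token to the comment classification id.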
public override void Parse(string code, out CommonTokenStream TokStream, out Parser Parser, out Lexer Lexer, out IParseTree ParseTree)
{
    IParseTree pt = null;

    // Set up Antlr to parse input grammar.
    byte[] byteArray = Encoding.UTF8.GetBytes(code);
    AntlrInputStream ais = new AntlrInputStream(
        new StreamReader(
            new MemoryStream(byteArray)).ReadToEnd());
    var lexer = new W3CebnfLexer(ais);
    CommonTokenStream cts = new CommonTokenStream(lexer);
    var parser = new W3CebnfParser(cts);

    // Install this project's error listeners in place of the default console listeners.
    lexer.RemoveErrorListeners();
    var lexer_error_listener = new ErrorListener<int>(parser, lexer, this.QuietAfter);
    lexer.AddErrorListener(lexer_error_listener);
    parser.RemoveErrorListeners();
    var parser_error_listener = new ErrorListener<IToken>(parser, lexer, this.QuietAfter);
    parser.AddErrorListener(parser_error_listener);

    try
    {
        pt = parser.prods();
    }
    catch (Exception)
    {
        // Parsing error.
    }

    TokStream = cts;
    Parser = parser;
    Lexer = lexer;
    ParseTree = pt;
}
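// Usage sketch for the out-parameter Parse overload (illustrative only;
// the variable names below are assumptions, not part of the original source):
//
//     grammarDescription.Parse(sourceText,
//         out CommonTokenStream tokens, out Parser parser,
//         out Lexer lexer, out IParseTree tree);
//     // tree is null if parser.prods() threw during parsing.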
public override void Parse(ParsingResults pd, bool bail)
{
    string ffn = pd.FullFileName;
    string code = pd.Code;
    if (ffn == null)
    {
        return;
    }
    if (code == null)
    {
        return;
    }
    this.QuietAfter = pd.QuietAfter;
    IParseTree pt = null;

    // Set up Antlr to parse input grammar.
    byte[] byteArray = Encoding.UTF8.GetBytes(code);
    AntlrInputStream ais = new AntlrInputStream(
        new StreamReader(
            new MemoryStream(byteArray)).ReadToEnd())
    {
        name = ffn
    };
    var lexer = new W3CebnfLexer(ais);
    CommonTokenStream cts = new CommonTokenStream(lexer);
    var parser = new W3CebnfParser(cts);

    // Install this project's error listeners in place of the default console listeners.
    lexer.RemoveErrorListeners();
    var lexer_error_listener = new ErrorListener<int>(parser, lexer, pd.QuietAfter);
    lexer.AddErrorListener(lexer_error_listener);
    parser.RemoveErrorListeners();
    var parser_error_listener = new ErrorListener<IToken>(parser, lexer, pd.QuietAfter);
    parser.AddErrorListener(parser_error_listener);

    // Optionally bail out of the parse on the first syntax error.
    BailErrorHandler bail_error_handler = null;
    if (bail)
    {
        bail_error_handler = new BailErrorHandler();
        parser.ErrorHandler = bail_error_handler;
    }

    try
    {
        pt = parser.prods();
    }
    catch (Exception)
    {
        // Parsing error.
    }

    //StringBuilder sb = new StringBuilder();
    //TreeSerializer.ParenthesizedAST(pt, sb, "", cts);
    //string fn = System.IO.Path.GetFileName(ffn);
    //fn = "c:\\temp\\" + fn;
    //System.IO.File.WriteAllText(fn, sb.ToString());

    if (parser_error_listener.had_error
        || lexer_error_listener.had_error
        || (bail_error_handler != null && bail_error_handler.had_error))
    {
        System.Console.Error.WriteLine("Error in parse of " + ffn);
    }
    else
    {
        System.Console.Error.WriteLine("Parse completed of " + ffn);
    }

    pd.TokStream = cts;
    pd.Parser = parser;
    pd.Lexer = lexer;
    pd.ParseTree = pt;

    // Walk the parse tree and attach the ParsingResults to every attributed
    // (non-terminal) node. Skip the walk entirely if the parse produced no tree.
    Stack<IParseTree> stack = new Stack<IParseTree>();
    if (pt != null)
    {
        stack.Push(pt);
    }
    while (stack.Any())
    {
        var x = stack.Pop();
        if (x is TerminalNodeImpl leaf)
        {
            // Nothing to attach to terminal nodes.
        }
        else
        {
            var y = x as AttributedParseTreeNode;
            if (y != null)
            {
                y.ParserDetails = pd;
            }
            for (int i = 0; i < x.ChildCount; ++i)
            {
                var c = x.GetChild(i);
                if (c != null)
                {
                    stack.Push(c);
                }
            }
        }
    }
}