/// <summary>
/// Entry point: parses a small hard-coded Python snippet with the generated
/// Python3 grammar in interactive (single_input) mode, then waits for a key
/// press so the console window stays open.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    // Sample program fed to the lexer/parser. single_input expects one
    // interactive statement, which may be a compound block like this "if".
    var program = "if True:\n" +
                  " a = 1\n" +
                  " b = 2\n" +
                  "\n";

    var inputStream = new AntlrInputStream(program);
    var lexer = new Python3Lexer(inputStream);
    var commonTokenStream = new CommonTokenStream(lexer);
    var parser = new Python3Parser(commonTokenStream);

    // Parse result is intentionally discarded; this program only exercises
    // the grammar. (Previously assigned to an unused local.)
    parser.single_input();

    // Block until the user presses Enter.
    Console.ReadLine();
}
/// <summary>
/// Entry point: lexes the Python source file named by the first command-line
/// argument and prints every token (symbolic name and text), for debugging
/// the lexer grammar.
/// </summary>
/// <param name="args">args[0] is the path of the Python file to tokenize.</param>
static void Main(string[] args)
{
    // Guard against a missing argument instead of throwing IndexOutOfRangeException.
    if (args.Length == 0)
    {
        Console.WriteLine("Usage: <program> <python-file>");
        return;
    }

    using (var fileStream = new FileStream(args[0], FileMode.Open))
    {
        var inputStream = new AntlrInputStream(fileStream);
        var pyLexer = new Python3Lexer(inputStream);
        var commonTokenStream = new CommonTokenStream(pyLexer);

        // Force the lexer to run to EOF so GetTokens() returns the full stream.
        commonTokenStream.Fill();

        foreach (var token in commonTokenStream.GetTokens())
        {
            // GetSymbolicName maps the numeric token type back to its grammar name.
            Console.WriteLine($"{pyLexer.Vocabulary.GetSymbolicName(token.Type)}: {token.Text}\n");
        }
    }
}
/// <summary>
/// Parses <paramref name="input"/> as a complete Python module using the
/// grammar's file_input start rule. The parse tree is built but not yet
/// consumed by any listener or visitor.
/// </summary>
/// <param name="input">Python source text to parse.</param>
public void Parse(string input)
{
    AntlrInputStream stream = new AntlrInputStream(input);
    ITokenSource lexer = new Python3Lexer(stream);
    ITokenStream tokens = new CommonTokenStream(lexer);

    Python3Parser parser = new Python3Parser(tokens);
    parser.BuildParseTree = true;

    // file_input is the whole-module start rule. The resulting tree is
    // currently discarded; attach a listener via ParseTreeWalker.Default.Walk
    // (e.g. a KeyPrinter) when tree consumption is needed.
    parser.file_input();
}
/// <summary>
/// Lexes <paramref name="fileContent"/> and returns a normalized string in
/// which every identifier (NAME token) is replaced by the literal "ID" while
/// all other tokens keep their original text; the synthetic "&lt;EOF&gt;"
/// marker appended by the lexer is stripped from the result.
/// </summary>
/// <param name="fileContent">Python source text to tokenize.</param>
/// <returns>The concatenated, identifier-normalized token text.</returns>
public string Tokenize(string fileContent)
{
    AntlrInputStream inputStream = new AntlrInputStream(fileContent);
    Python3Lexer lexer = new Python3Lexer(inputStream);
    Antlr4.Runtime.CommonTokenStream tokenStream = new Antlr4.Runtime.CommonTokenStream(lexer);

    // Force the lexer to run to EOF so GetTokens() returns the full stream.
    tokenStream.Fill();

    // StringBuilder avoids the O(n^2) cost of repeated string concatenation.
    var result = new System.Text.StringBuilder();
    foreach (var token in tokenStream.GetTokens())
    {
        result.Append(token.Type == Python3Lexer.NAME ? "ID" : token.Text);
    }

    return result.ToString().Replace("<EOF>", "");
}