static void Main(string[] args)
{
    do
    {
        Console.Write("> ");
        string expr = Console.ReadLine();
        if (expr == null)
        {
            break; // end of input (Ctrl+Z / Ctrl+D)
        }

        AntlrInputStream inputStream = new AntlrInputStream(expr);
        ExprLexer exprLexer = new ExprLexer(inputStream);
        CommonTokenStream tokenStream = new CommonTokenStream(exprLexer);
        ExprParser exprParser = new ExprParser(tokenStream);
        exprParser.BuildParseTree = true;
        var cst = exprParser.compileUnit();

        try
        {
            var ast = new BuildAstVisitor().VisitCompileUnit(cst);
#if DEBUG
            PrintAst(ast, 0);
#endif
            Console.WriteLine($"= {new AstEvaluator().Visit(ast)}");
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
    } while (true);
}
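The BuildAstVisitor, AstEvaluator and PrintAst helpers referenced above are project-specific and not shown. As a rough illustration of the visitor half of that pipeline, here is a minimal sketch that skips the intermediate AST and evaluates the parse tree directly with the ExprBaseVisitor<T> class ANTLR generates when visitors are enabled; the compileUnit entry rule matches the snippet, but the AddSub/Number alternative labels and the op token label are assumptions about the underlying Expr grammar, not taken from it.

// Hypothetical sketch only: the labeled contexts (AddSubContext, NumberContext)
// and the 'op' token label must match what Expr.g4 actually declares.
class DirectEvaluator : ExprBaseVisitor<double>
{
    public override double VisitCompileUnit(ExprParser.CompileUnitContext context)
        => Visit(context.expr());

    public override double VisitNumber(ExprParser.NumberContext context)
        => double.Parse(context.GetText());

    public override double VisitAddSub(ExprParser.AddSubContext context)
    {
        var left = Visit(context.expr(0));
        var right = Visit(context.expr(1));
        return context.op.Type == ExprLexer.PLUS ? left + right : left - right;
    }
}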
static void Main(string[] args)
{
    var input = new ANTLRStringStream("a=(3+4)*5\r\na+1\r\n");
    var lexer = new ExprLexer(input);
    var tokens = new CommonTokenStream(lexer);
    var parser = new ExprParser(tokens);
    parser.prog();
}
public Expr Parse(string expression)
{
    var input = new AntlrInputStream(expression);
    var lexer = new ExprLexer(input);
    var tokens = new CommonTokenStream(lexer);
    var parser = new ExprParser(tokens);
    var visitor = new ExprVisitor();
    return visitor.Visit(parser.expr());
}
public void Eval(string expr)
{
    var stream = CharStreams.fromString(expr);
    var lexer = new ExprLexer(stream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new ExprParser(tokens) { BuildParseTree = true };
    IParseTree tree = parser.prog();
    ParseTreeWalker.Default.Walk(new Listener(_functions), tree);
}
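The Listener type and the _functions field above are application-specific and not shown. A minimal sketch of what such a listener could look like follows, assuming the generated ExprBaseListener base class; the FunctionCall rule/label, its ID() accessor, and the shape of the function table are all assumptions, not taken from the real grammar.

// Hypothetical sketch: ParseTreeWalker.Default.Walk(...) calls the Enter/Exit
// methods below as it traverses the tree produced by parser.prog().
using System;
using System.Collections.Generic;

class Listener : ExprBaseListener
{
    private readonly IDictionary<string, Func<double[], double>> _functions;

    public Listener(IDictionary<string, Func<double[], double>> functions)
    {
        _functions = functions;
    }

    public override void ExitFunctionCall(ExprParser.FunctionCallContext context)
    {
        // Called by the walker when it leaves a function-call node (assumed rule name).
        var name = context.ID().GetText();
        if (!_functions.ContainsKey(name))
        {
            Console.Error.WriteLine($"unknown function: {name}");
        }
    }
}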
static void Main(string[] args)
{
    using (var standardInput = Console.OpenStandardInput())
    {
        ANTLRInputStream input = new ANTLRInputStream(standardInput);
        ExprLexer lexer = new ExprLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        ExprParser parser = new ExprParser(tokens);
        parser.prog();
    }
    Console.ReadKey();
}
static void Main(string[] Args)
{
    StreamReader input_src;

    //
    // If there is a file name on the command line, then use it as the input source; otherwise,
    // start reading from the console
    //
    if (Args.Length > 0)
    {
        input_src = File.OpenText(Args[0]);
    }
    else
    {
        Console.WriteLine("Enter expressions to evaluate");
        input_src = new StreamReader(Console.OpenStandardInput(), Console.InputEncoding);
    }

    string input = input_src.ReadLine();
    int cur_line = 1;

    //
    // Create a parser without a token source. This allows us to instantiate the parser just
    // once, preserving the @parser::members declared in the grammar
    //
    ExprParser parser = new ExprParser(null);
    parser.BuildParseTree = false;

    while (input != null && input.Length != 0) // stop at end of input (ReadLine returns null) or on a blank line
    {
        input = input + "\n";
        byte[] input_bytes = Encoding.ASCII.GetBytes(input);
        MemoryStream mem_stream = new MemoryStream(input_bytes);
        AntlrInputStream input_stream = new AntlrInputStream(mem_stream); // Create a stream that reads from the input source
        ExprLexer lexer = new ExprLexer(input_stream);                    // Create a lexer that feeds off of the input stream
        lexer.Line = cur_line;
        lexer.Column = 0;
        CommonTokenStream tokens = new CommonTokenStream(lexer);          // Create a buffer of tokens pulled from the lexer
        parser.TokenStream = tokens;
        parser.stat();

        input = input_src.ReadLine();
        cur_line = cur_line + 1;
    } // End while
} // End Main
public static void Main(string[] args)
{
    try
    {
        ICharStream input = new ANTLRReaderStream(System.Console.In);
        ExprLexer lexer = new ExprLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        ExprParser parser = new ExprParser(tokens);
        parser.prog();
    }
    catch (System.Exception e)
    {
        Console.Error.WriteLine("exception: " + e);
        Console.Error.WriteLine(e.StackTrace);
    }
}
public ExprNode Build(string str)
{
    ExprLexer lexer = new ExprLexer(new AntlrInputStream(str));
    ExprParser parser = new ExprParser(new CommonTokenStream(lexer));
    IParseTree tree = parser.expr();
    string tree_str = tree.ToStringTree(parser); // LISP-style tree text, handy when debugging
    ExprNode ast = this.Visit(tree);
    return ast;
}
static AstNode GetAst(string s)
{
    AntlrInputStream inputStream = new AntlrInputStream(s);
    ExprLexer exprLexer = new ExprLexer(inputStream);
    CommonTokenStream tokenStream = new CommonTokenStream(exprLexer);
    ExprParser exprParser = new ExprParser(tokenStream) { BuildParseTree = true };
    var cst = exprParser.compileUnit();
    if (exprParser.NumberOfSyntaxErrors != 0)
    {
        throw new ArgumentException("invalid syntax");
    }
    var ast = new AstBuilderVisitor().Visit(cst);
    return ast;
}
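Checking NumberOfSyntaxErrors after the parse, as above, works but only reports that something failed. A common alternative, sketched here under the assumption of the C#-optimized Antlr4.Runtime (the Antlr4.Runtime.Standard package adds a leading TextWriter parameter to SyntaxError), is to register an error listener that throws on the first syntax error with position information:

using System;
using Antlr4.Runtime;

// Sketch of a fail-fast error listener; attach it with
// exprParser.RemoveErrorListeners(); exprParser.AddErrorListener(new ThrowingErrorListener());
class ThrowingErrorListener : BaseErrorListener
{
    public override void SyntaxError(IRecognizer recognizer, IToken offendingSymbol,
                                     int line, int charPositionInLine, string msg,
                                     RecognitionException e)
    {
        throw new ArgumentException($"invalid syntax at {line}:{charPositionInLine}: {msg}");
    }
}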
protected Expression BuildFunction(string str)
{
    ExpressionBuilder builder = new ExpressionBuilder();
    ExprLexer lexer = new ExprLexer(new AntlrInputStream(str));
    ExprParser parser = new ExprParser(new CommonTokenStream(lexer));
    IParseTree tree = parser.function();
    string tree_str = tree.ToStringTree(parser); // LISP-style tree text, handy when debugging
    Expression ast = builder.Visit(tree);
    return ast;
}
public static void Main(string[] args)
{
    try
    {
        ExprLexer lexer = new ExprLexer(new CharBuffer(Console.In));
        ExprParser parser = new ExprParser(lexer);

        // Set the type of tree node to create; this is the default action,
        // so it is unnecessary to do it here, but it demos the capability.
        parser.setASTNodeClass("antlr.CommonAST");
        parser.expr();

        antlr.CommonAST ast = (antlr.CommonAST)parser.getAST();
        if (ast != null)
        {
            Console.Out.WriteLine(ast.ToStringList());
        }
        else
        {
            Console.Out.WriteLine("null AST");
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("exception: " + e);
    }
}
static void Expr(string[] args)
{
    string inputfilepath = null;
    if (0 < args.Length)
    {
        inputfilepath = args[0];
    }

    // Read from the named file if a path was given, otherwise from the console.
    TextReader ist = Console.In;
    if (inputfilepath != null)
    {
        ist = File.OpenText(inputfilepath);
    }

    var input = new AntlrInputStream(ist);
    var lexer = new ExprLexer(input);
    var tokens = new CommonTokenStream(lexer);
    var parser = new ExprParser(tokens);
    IParseTree tree = parser.prog();
    var eval = new EvalVisitor();
    eval.Visit(tree);
    Console.ReadKey();
}
public void ExpressionBuilderTest()
{
    Expression ast;

    //
    // Test expressions with errors.
    //
    string errorStr = "aaa(p1,p2) bbb()";
    ExprLexer lexer = new ExprLexer(new AntlrInputStream(errorStr));
    ExprParser parser = new ExprParser(new CommonTokenStream(lexer));
    IParseTree tree = parser.expr();
    var exception = ((ParserRuleContext)tree.GetChild(0)).exception;
    Assert.AreEqual(exception.GetType().Name, "InputMismatchException");
    Assert.AreEqual(exception.OffendingToken.Text, "bbb");

    //
    // Test all primitive values (literals) within arithmetic expressions
    //
    string literalStr = "1 + 2 * 2.2 - (\"three three\" / 33.3)";
    ast = BuildExpr(literalStr);
    Assert.AreEqual(ast.Operation, Operation.SUB);
    Assert.AreEqual(ast.Operands[0].Operation, Operation.ADD);
    Assert.AreEqual(ast.Operands[1].Operation, Operation.DIV);
    Assert.AreEqual(ast.Operands[0].Operands[1].Operation, Operation.MUL);
    Assert.AreEqual(ast.Operands[1].Operands[0].Output, "three three");
    Assert.AreEqual(ast.Operands[1].Operands[1].Output, 33.3);
    Assert.AreEqual(ast.Operands[0].Operands[1].Operands[1].Output, 2.2);
    Assert.AreEqual(ast.Operands[0].Operands[0].Output, 1);

    //
    // Test logical operations of comparison and equality
    //
    string logicalStr = "1 <= 2 * 2.2 || (\"three three\" / 33.3) < 44 && 10 > 20";
    ast = BuildExpr(logicalStr);
    Assert.AreEqual(ast.Operation, Operation.OR);
    Assert.AreEqual(ast.Operands[0].Operation, Operation.LEQ);
    Assert.AreEqual(ast.Operands[1].Operation, Operation.AND);
    Assert.AreEqual(ast.Operands[1].Operands[1].Operation, Operation.GRE);

    //
    // Add simple access (functions, variables, fields etc.)
    //
    string accessStr = "aaa(p1,p2) + bbb() * [bbb BBB] - ([ccc CCC](p1*p2) / ddd(p1()+p2(), p3()))";
    ast = BuildExpr(accessStr);
    Assert.AreEqual(ast.Operation, Operation.SUB);
    Assert.AreEqual(ast.Operands[0].Operation, Operation.ADD);
    Assert.AreEqual(ast.Operands[0].Operands[0].Name, "aaa");
    Assert.AreEqual(ast.Operands[0].Operands[0].Operands[0].Name, "p1");
    Assert.AreEqual(ast.Operands[0].Operands[0].Operands[1].Name, "p2");
    Assert.AreEqual(ast.Operands[0].Operands[1].Operation, Operation.MUL);
    Assert.AreEqual(ast.Operands[0].Operands[1].Operands[0].Name, "bbb");
    Assert.AreEqual(ast.Operands[0].Operands[1].Operands[1].Name, "bbb BBB");

    //
    // Add complex access (dot, projection, de-projection paths with priority/scopes)
    //
    string accessPathStr = "aaa(p1,p2) . bbb() <- [bbb BBB] + [ccc CCC](this.p1.p2()) -> ddd(p1()<-p2()->p3()) -> eee";
    ast = BuildExpr(accessPathStr);
    Assert.AreEqual(ast.Operation, Operation.ADD);
    Assert.AreEqual(ast.Operands[0].Operation, Operation.DEPROJECTION);
    Assert.AreEqual(ast.Operands[0].Name, "bbb BBB");
    Assert.AreEqual(ast.Operands[0].Input.Operation, Operation.DOT);
    Assert.AreEqual(ast.Operands[0].Input.Name, "bbb");
    Assert.AreEqual(ast.Operands[0].Input.Input.Name, "aaa");
    Assert.AreEqual(ast.Operands[1].Operation, Operation.PROJECTION);
    Assert.AreEqual(ast.Operands[1].Name, "eee");
    Assert.AreEqual(ast.Operands[1].Input.Operation, Operation.PROJECTION);
    Assert.AreEqual(ast.Operands[1].Input.Name, "ddd");
    Assert.AreEqual(ast.Operands[1].Input.Input.Name, "ccc CCC");
    Assert.AreEqual(ast.Operands[1].Input.Input.Operands[0].Name, "p2");
    Assert.AreEqual(ast.Operands[1].Input.Input.Operands[0].Input.Name, "p1");
    Assert.AreEqual(ast.Operands[1].Input.Input.Operands[0].Input.Input.Name, "this");
    Assert.AreEqual(ast.Operands[1].Input.Operands[0].Name, "p3");
    Assert.AreEqual(ast.Operands[1].Input.Operands[0].Input.Name, "p2");
    Assert.AreEqual(ast.Operands[1].Input.Operands[0].Input.Input.Name, "p1");
}
public void Completion_Grammar_SimpleExpression()
{
    // arrange
    var input = "var c = a + b()";
    var inputStream = new AntlrInputStream(input);
    var lexer = new ExprLexer(inputStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new ExprParser(tokenStream);

    lexer.RemoveErrorListeners();
    parser.RemoveErrorListeners();
    var errorListener = new CountingErrorListener();
    parser.AddErrorListener(errorListener);

    // act
    // assert
    // Specify our entry point
    var tree = parser.expression();
    Check.That(errorListener.ErrorCount).IsEqualTo(0);

    var core = new CodeCompletionCore(parser, null, null);

    // 1) At the input start.
    var candidates = core.CollectCandidates(0, null);
    Check.That(candidates.Tokens).HasSize(3);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.VAR);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.LET);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.ID);
    Check.That(candidates.Tokens[ExprLexer.VAR]).IsEqualTo(new[] { ExprLexer.ID, ExprLexer.EQUAL });
    Check.That(candidates.Tokens[ExprLexer.LET]).IsEqualTo(new[] { ExprLexer.ID, ExprLexer.EQUAL });
    Check.That(candidates.Tokens[ExprLexer.ID]).HasSize(0);

    // 2) On the first whitespace. In a real implementation you would do additional checks to see where
    // in the whitespace the caret is, as the outcome differs depending on that position.
    candidates = core.CollectCandidates(1, null);
    Check.That(candidates.Tokens).HasSize(1);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.ID);

    // 3) On the variable name ('c').
    candidates = core.CollectCandidates(2, null);
    Check.That(candidates.Tokens).HasSize(1);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.ID);

    // 4) On the equal sign (ignoring whitespace positions from now on).
    candidates = core.CollectCandidates(4, null);
    Check.That(candidates.Tokens).HasSize(1);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.EQUAL);

    // 5) On the variable reference 'a'. Since we have not configured the c3 engine to return variable refs
    // (or function refs, for that matter), we only get an ID here.
    candidates = core.CollectCandidates(6, null);
    Check.That(candidates.Tokens).HasSize(1);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.ID);

    // 6) On the '+' operator. Usually you would not show operators as candidates, but the c3 engine
    // has not yet been told to suppress them.
    candidates = core.CollectCandidates(8, null);
    Check.That(candidates.Tokens).HasSize(5);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.PLUS);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.MINUS);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.MULTIPLY);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.DIVIDE);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.OPEN_PAR);
}
public void Completion_Grammar_TypicalExpression()
{
    // arrange
    var expression = "var c = a + b";
    var inputStream = new AntlrInputStream(expression);
    var lexer = new ExprLexer(inputStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new ExprParser(tokenStream);
    parser.Interpreter.PredictionMode = PredictionMode.LlExactAmbigDetection;

    lexer.RemoveErrorListeners();
    parser.RemoveErrorListeners();
    var errorListener = new CountingErrorListener();
    parser.AddErrorListener(errorListener);

    // act
    // assert
    // Specify our entry point
    var tree = parser.expression();
    Check.That(errorListener.ErrorCount).IsEqualTo(0);

    // Tell the engine to return certain rules to us, which we could use to look up values in a symbol table.
    var preferredRules = new HashSet<int>() { ExprParser.RULE_functionRef, ExprParser.RULE_variableRef };

    // Ignore operators and the generic ID token.
    var ignoredTokens = new HashSet<int>() { ExprLexer.PLUS, ExprLexer.MINUS, ExprLexer.MULTIPLY, ExprLexer.DIVIDE };

    var core = new CodeCompletionCore(parser, preferredRules, ignoredTokens);

    // 1) At the input start.
    var candidates = core.CollectCandidates(0, null);
    Check.That(candidates.Tokens).HasSize(2);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.VAR);
    Check.That(candidates.Tokens).ContainsKey(ExprLexer.LET);
    Check.That(candidates.Tokens.TryGetValue(ExprLexer.VAR, out var varCandidates)).IsTrue();
    Check.That(candidates.Tokens.TryGetValue(ExprLexer.LET, out var letCandidates)).IsTrue();
    Check.That(varCandidates).HasSize(2);
    Check.That(letCandidates).HasSize(2);
    Check.That(varCandidates).IsEqualTo(new[] { ExprLexer.ID, ExprLexer.EQUAL });
    Check.That(letCandidates).IsEqualTo(new[] { ExprLexer.ID, ExprLexer.EQUAL });

    // 2) On the variable name ('c').
    ignoredTokens = new HashSet<int>()
    {
        ExprLexer.ID, ExprLexer.PLUS, ExprLexer.MINUS,
        ExprLexer.MULTIPLY, ExprLexer.DIVIDE, ExprLexer.EQUAL
    };
    core = new CodeCompletionCore(parser, preferredRules, ignoredTokens);
    candidates = core.CollectCandidates(2, null);
    Check.That(candidates.Tokens).HasSize(0);

    // 4) On the equal sign (ignoring whitespace positions from now on).
    candidates = core.CollectCandidates(4, null);
    Check.That(candidates.Tokens).HasSize(0);

    // 5) On the variable reference 'a'.
    candidates = core.CollectCandidates(6, null);
    Check.That(candidates.Tokens).HasSize(0);
    Check.That(candidates.Rules).HasSize(2);

    // Here we get 2 rule indexes, derived from 2 different IDs possible at this caret position.
    // These are what we told the engine above to be preferred rules for us.
    var found = 0;
    foreach (var candidate in candidates.Rules)
    {
        switch (candidate.Key)
        {
            case ExprParser.RULE_functionRef: { found++; break; }
            case ExprParser.RULE_variableRef: { found++; break; }
        }
    }
    Check.That(found).Equals(2);

    // 6) On the whitespace after the 'a'.
    candidates = core.CollectCandidates(7, null);
    Check.That(candidates.Tokens).HasSize(0);
    Check.That(candidates.Rules).HasSize(1);

    // Here we expect only one rule index.
    found = 0;
    foreach (var candidate in candidates.Rules)
    {
        switch (candidate.Key)
        {
            case ExprParser.RULE_functionRef: { found++; break; }
            case ExprParser.RULE_variableRef: { found++; break; }
        }
    }
    Check.That(found).Equals(1);
}
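Both completion tests rely on a CountingErrorListener helper that is not shown. A plausible minimal version, sketched here with the same caveat that Antlr4.Runtime.Standard adds a leading TextWriter parameter to the SyntaxError override (the signature below matches the C#-optimized runtime), simply counts syntax errors so the tests can assert that the input parsed cleanly:

using Antlr4.Runtime;

// Sketch of the CountingErrorListener helper assumed by the tests above.
class CountingErrorListener : BaseErrorListener
{
    public int ErrorCount { get; private set; }

    public override void SyntaxError(IRecognizer recognizer, IToken offendingSymbol,
                                     int line, int charPositionInLine, string msg,
                                     RecognitionException e)
    {
        ErrorCount++;
    }
}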
static void Main(string[] Args)
{
    StreamReader input_src;

    //
    // If there is a file name on the command line, then use it as the input source; otherwise,
    // use the console (keyboard) as the input source
    //
    if (Args.Length > 0)
    {
        input_src = File.OpenText(Args[0]);
    }
    else
    {
        Console.WriteLine("Enter expressions to evaluate");
        input_src = new StreamReader(Console.OpenStandardInput(), Console.InputEncoding);
    }

    //
    // Read the first line from the input source
    //
    string input = input_src.ReadLine();
    int cur_line = 1; // Needed when parsing lines in a file

    //
    // Create a parser without a token source. This allows us to instantiate the parser just
    // once, preserving the @parser::members declared in the grammar. Later, we'll attach the
    // parser to a token stream
    //
    ExprParser parser = new ExprParser(null);
    parser.BuildParseTree = false;

    //
    // Loop getting input from the input source (console or file) until end of file (or CTRL-Z if input is console)
    //
    while (input != null)
    {
        //
        // The grammar is expecting a NEWLINE as a statement terminator, but that isn't included by ReadLine, so add a NEWLINE
        // to the end of the input string
        //
        input = input + "\n";

        //
        // Turn the input string into a stream compatible with ANTLR
        //
        byte[] input_bytes = Encoding.ASCII.GetBytes(input);
        MemoryStream mem_stream = new MemoryStream(input_bytes);

        //
        // Attach ANTLR to the memory stream
        //
        AntlrInputStream input_stream = new AntlrInputStream(mem_stream); // Create a stream that reads from the input source
        ExprLexer lexer = new ExprLexer(input_stream);                    // Create a lexer that feeds off of the input stream

        //
        // When reading from a file the line number is important for error messages. Normally, we would read the entire file into
        // a string and then parse it, but we're not doing that; we are parsing each line as we read it, so tell the lexer the current
        // line number and character position before it lexes each input line. If we didn't do this, the error reporting mechanism
        // would always report that the error was on line 1
        //
        lexer.Line = cur_line;
        lexer.Column = 0;

        CommonTokenStream tokens = new CommonTokenStream(lexer); // Create a buffer of tokens pulled from the lexer

        //
        // Attach the parser to the new token stream (the current line), and start the parse by calling the 'stat' rule in the grammar.
        // The semantic actions will then do all the work of outputting the results from processing the expressions
        //
        parser.TokenStream = tokens;
        parser.stat();

        //
        // Get the next line of input from the input source
        //
        input = input_src.ReadLine();
        cur_line = cur_line + 1;
    } // End while
} // End Main