/// <summary>
/// Parses the token stream produced by <paramref name="lex"/> into a program:
/// a sequence of function declarations (`functionDeclaration*`).
/// </summary>
/// <param name="lex">The lexer supplying tokens; must be fully consumed on success.</param>
/// <returns>The parsed program, with functions keyed by their <c>name</c>.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when a parse is already in progress (this parser is not thread-safe),
/// or when trailing tokens remain after the last function declaration.
/// </exception>
public Ast.Program Parse(SimpleLexer lex)
{
    if (lexer != null)
    {
        throw new InvalidOperationException("Multithreading is not supported");
    }

    lexer = lex;
    try
    {
        var functions = new List<Ast.FunctionDeclaration>();
        while (true)
        {
            var func = ParseFunctionDecl();
            if (func == null)
            {
                break;
            }
            functions.Add(func);
        }

        if (!lex.EOF())
        {
            throw new InvalidOperationException("Bad file : it must be `functionDeclaration*`");
        }

        return new Ast.Program(functions.ToDictionary(x => x.name, x => x));
    }
    finally
    {
        // Bug fix: previously `lexer` was only cleared on the success path, so a
        // parse error left the guard field set and every later call was rejected
        // with "Multithreading is not supported". Always release it.
        lexer = null;
    }
}
// private void RegisterViewEngine() { var lexer = new SimpleLexer(); var parser = new SimpleParser(ParsingRules.Create); var engine = new Engine(templateName => Parse(lexer, parser, Context.Server.MapPath(templateName))); ViewEngines.Engines.Add(new SimpleEngine(engine, lexer, parser)); }
// A lone digit should produce exactly one Digit token carrying its value.
public void SingleDigitTest()
{
    var lexer = new SimpleLexer();

    var tokens = lexer.Tokenize("0").ToList();

    Assert.That(tokens.Count, Is.EqualTo(1));
    Assert.That(tokens[0], Is.EqualTo(new Token(TokenType.Digit, 0)));
}
// An "{{include ...}}" directive should be emitted as a single include token
// whose Value is the full directive text, with constants on either side.
public void IncludeToken()
{
    var sut = new SimpleLexer();

    var tokens = sut.Tokenize("a{{include other b}}c").ToList();

    Assert.AreEqual(4, tokens.Count);
    var directive = tokens[1];
    Assert.AreEqual("include", directive.Type);
    Assert.AreEqual("{{include other b}}", directive.Value);
}
// A newline inside constant text must not split it: the whole "a\nb" input is
// one constant token, followed by the EOF marker.
public void MultilineConstant()
{
    var sut = new SimpleLexer();

    var tokens = sut.Tokenize("a\nb").ToList();

    Assert.AreEqual(2, tokens.Count);
    var constant = tokens[0];
    Assert.AreEqual("constant", constant.Type);
    Assert.AreEqual("a\nb", constant.Value);
    Assert.AreEqual(Token.EOF, tokens[1]);
}
/// <summary>
/// Entry point: lexes and parses the file named by the first command-line
/// argument through the ANTLR-generated pipeline, starting at the
/// <c>program</c> rule. Prints a usage message to stderr when no file is given.
/// </summary>
public static void Main(string[] args)
{
    if (args.Length > 0)
    {
        ICharStream input = new ANTLRFileStream(args[0]);
        SimpleLexer lex = new SimpleLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lex);
        SimpleParser parser = new SimpleParser(tokens);
        parser.program();
    }
    else
    {
        // Braces added: the unbraced else was inconsistent with the rest of
        // the file's always-brace style.
        Console.Error.WriteLine("Usage: island <input-file>");
    }
}
// "+-" should tokenize to a Plus token followed by a Minus token.
public void SymbolsTest()
{
    var lexer = new SimpleLexer();

    var actual = lexer.Tokenize("+-").ToList();

    Assert.That(actual.Count, Is.EqualTo(2));
    ICollection expected = new List<Token>
    {
        new Token(TokenType.Plus),
        new Token(TokenType.Minus),
    };
    AssertListIsSame(expected, actual);
}
/// <summary>
/// Entry point: parses the file given as the first argument via the
/// ANTLR-generated lexer/parser, starting at the <c>program</c> rule.
/// </summary>
public static void Main(string[] args)
{
    // Guard clause: bail out early when no input file was supplied.
    if (args.Length == 0)
    {
        Console.Error.WriteLine("Usage: island <input-file>");
        return;
    }

    var input = new ANTLRFileStream(args[0]);
    var lexer = new SimpleLexer(input);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new SimpleParser(tokenStream);
    parser.program();
}
/// <summary>
/// Parses a C++ type name from its string form. The bare string "*" is
/// treated as the special wildcard type.
/// </summary>
/// <param name="str">The type name text to parse.</param>
/// <returns>The parsed type name.</returns>
/// <exception cref="Exception">When tokens remain after a complete type name.</exception>
public static CppTypeName Parse(string str)
{
    if (str.Trim() == "*")
    {
        return new CppTypeName(false, "*");
    }

    var lexer = new SimpleLexer(str);
    var parsed = Parse(lexer);

    // The whole input must be consumed; anything left over is an error.
    // (Message is Chinese for "Cpp type-name parse error: unexpected trailing data".)
    if (lexer.NextToken() != null)
    {
        throw new Exception("解析Cpp类型名错误,出现意外的尾随数据");
    }
    return parsed;
}
// "9-5+2" should produce the five tokens 9, '-', 5, '+', 2 in order.
public void SimpleEquationTest()
{
    var lexer = new SimpleLexer();

    var actual = lexer.Tokenize("9-5+2").ToList();

    Assert.That(actual.Count, Is.EqualTo(5));
    ICollection expected = new List<Token>
    {
        new Token(TokenType.Digit, 9),
        new Token(TokenType.Minus),
        new Token(TokenType.Digit, 5),
        new Token(TokenType.Plus),
        new Token(TokenType.Digit, 2),
    };
    AssertListIsSame(expected, actual);
}
/// <summary>
/// Parses a formula string into its ANTLR AST, starting from the <c>start</c> rule.
/// </summary>
/// <param name="formula">The formula text (fed to the lexer as UTF-8 bytes).</param>
/// <returns>The root of the resulting tree.</returns>
public static CommonTree Parse(string formula)
{
    // Fix: the MemoryStream was never disposed. ANTLRInputStream consumes the
    // stream in its constructor, so disposing on exit is safe.
    using (var m = new MemoryStream(Encoding.UTF8.GetBytes(formula)))
    {
        // Create lexer
        var input = new ANTLRInputStream(m);
        var lexer = new SimpleLexer(input);
        var tokens = new CommonTokenStream(lexer);
        // Create parser and start evaluating from start rule
        var parser = new SimpleParser(tokens);
        var tree = parser.start();
        return (CommonTree)tree.Tree;
    }
}
// Here's where we do the real work...
/// <summary>
/// Lexes and parses the Java source file at <paramref name="f"/>, starting at
/// the <c>file</c> rule. Failures are reported to stderr rather than rethrown.
/// </summary>
public static void ParseJavaFile(string f)
{
    try
    {
        // Create a scanner that reads from the input stream passed to us
        SimpleLexer lexer = new SimpleLexer(new ANTLRFileStream(f));
        CommonTokenStream tokens = new CommonTokenStream();
        tokens.TokenSource = lexer;

        // Create a parser that reads from the scanner
        SimpleParser parser = new SimpleParser(tokens);

        // start parsing at the file rule
        parser.file();
    }
    catch (Exception e)
    {
        // Fixed typo: banner previously printed "[ERROR}" (mismatched bracket).
        Console.Error.WriteLine("[ERROR]");
        Console.Error.WriteLine("parser exception: " + e);
        Console.Error.WriteLine(e.StackTrace); // so we can get stack trace
    }
}
// Each of the ten space-separated digits should become its own Digit token.
public void AllDigitsTest()
{
    var lexer = new SimpleLexer();

    var actual = lexer.Tokenize("0 1 2 3 4 5 6 7 8 9").ToList();

    Assert.That(actual.Count, Is.EqualTo(10));
    ICollection expected = new List<Token>(
        Enumerable.Range(0, 10).Select(d => new Token(TokenType.Digit, d)));
    AssertListIsSame(expected, actual);
}
// Adjacent digits should merge into a single Digit token ("42" -> value 42).
public void TwoDigitNumberTest()
{
    var lexer = new SimpleLexer();

    var tokens = lexer.Tokenize("42").ToList();

    Assert.That(tokens.Count, Is.EqualTo(1));
    Assert.That(tokens[0], Is.EqualTo(new Token(TokenType.Digit, 42)));
}
/// <summary>
/// Recursively parses one C++ type name from the lexer: leading "const"
/// qualifiers, the base name, an optional template argument list in angle
/// brackets, trailing "const" qualifiers, and any number of "*" pointer
/// levels (each optionally "const"-qualified).
/// </summary>
/// <param name="lexer">Token source, positioned at the start of a type name.</param>
/// <returns>The parsed type; typeParam and ptrInfos are null when absent.</returns>
private static CppTypeName Parse(SimpleLexer lexer) {
    var ptrInfos = new List <CppPtrType>();
    var isConst = false;
    var name = lexer.NextToken();
    // Consume any "const" tokens that precede the base name.
    while (name == "const") { isConst = true; name = lexer.NextToken(); }
    string nextToken;
    // Also consume "const" tokens immediately after the base name (peek, then eat).
    while ((nextToken = lexer.PeekToken()) == "const") {
        lexer.NextToken(); // "const"
        isConst = true;
        nextToken = lexer.PeekToken();
    }
    // Optional template argument list: "<" type ("," type)* ">" — or empty "<>".
    List <CppTypeName> typeParam = null;
    if (nextToken == "<") {
        typeParam = new List <CppTypeName>();
        lexer.NextToken(); // "<"
        if (lexer.PeekToken() != ">") {
            typeParam.Add(Parse(lexer));
            // Each "," introduces another argument; the loop ends on the first
            // non-"," token, which must then be the closing ">".
            while ((nextToken = lexer.NextToken()) == ",") { typeParam.Add(Parse(lexer)); }
            if (nextToken != ">") {
                // Message (Chinese): "Cpp type-name parse error: '<' and '>' unbalanced".
                throw new Exception("解析Cpp类型名错误,'<' 与 '>' 不匹配");
            }
        } else {
            lexer.NextToken(); // ">"
        }
    }
    // "const" may also appear after the template list; fold it into isConst.
    while ((nextToken = lexer.PeekToken()) == "const") {
        lexer.NextToken(); // "const"
        isConst = true;
        nextToken = lexer.PeekToken();
    }
    // Each "*" adds one pointer level; a "const" after a "*" marks that pointer
    // level (not the pointee) as const.
    while (nextToken == "*") {
        lexer.NextToken(); // "*"
        var ptrInfo = CppPtrType.Normal;
        while ((nextToken = lexer.PeekToken()) == "const") {
            ptrInfo = CppPtrType.Const;
            lexer.NextToken(); //const
            nextToken = lexer.PeekToken();
        }
        ptrInfos.Add(ptrInfo);
    }
    // Convention: "no pointer levels" is represented as null, not an empty list.
    if (ptrInfos.Count == 0) { ptrInfos = null; }
    return(new CppTypeName(isConst, name, typeParam, ptrInfos));
}
// NOTE(review): "Tranlate" looks like a typo for "Translate", but the name is
// part of the public interface, so it is kept unchanged here.
/// <summary>
/// Tokenizes <paramref name="input"/> and runs the predictive
/// recursive-descent parser over the resulting token stream.
/// </summary>
public string Tranlate(string input)
{
    var tokenizer = new SimpleLexer();
    var descentParser = new PredictiveRecursiveDescentParser();
    return descentParser.Parse(tokenizer.Tokenize(input));
}
/// <summary>
/// Creates a parser over <paramref name="lexer"/> and primes the single-token
/// lookahead by consuming the first token.
/// </summary>
/// <param name="lexer">Token source; must not be null.</param>
/// <exception cref="ArgumentNullException">When <paramref name="lexer"/> is null.</exception>
public SimpleParser(SimpleLexer lexer)
{
    // Validate up front: previously a null lexer surfaced as a
    // NullReferenceException from the NextToken() call below.
    if (lexer == null)
    {
        throw new ArgumentNullException(nameof(lexer));
    }
    _lexer = lexer;
    _lookAhead = _lexer.NextToken();
}