/// <summary>
/// Diff-tests two tokenizations of the xbnf grammar file and reports the
/// first index at which they disagree (writes findings to stderr).
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void _TestXbnfTokenizers(string[] args)
{
    var cfg = CfgDocument.ReadFrom(@"..\..\..\xbnf.pck");
    var lex = LexDocument.ReadFrom(@"..\..\..\xbnf.pck");
    string input = null;
    using (var sr = File.OpenText(@"..\..\..\xbnf.xbnf"))
        input = sr.ReadToEnd();
    var tokenizer1 = lex.ToTokenizer(input, cfg.EnumSymbols());
    // NOTE(review): tokenizer2 is currently the SAME instance as tokenizer1,
    // so this test trivially passes; it is a placeholder until the generated
    // tokenizer (commented out below) is re-enabled for a real comparison.
    var tokenizer2 = tokenizer1; // new XbnfTokenizer(input);
    var t1 = new List<Token>(tokenizer1);
    var t2 = new List<Token>(tokenizer2);
    if (t1.Count != t2.Count)
    {
        Console.Error.WriteLine("Counts are different.");
    }
    // Bound the scan by the shorter list: the original loop ran to t1.Count
    // while indexing t2[i], which throws IndexOutOfRangeException whenever
    // t2 is shorter instead of reporting the divergence.
    for (int ic = Math.Min(t1.Count, t2.Count), i = 0; i < ic; ++i)
    {
        if (!Equals(t1[i], t2[i]))
        {
            Console.Error.WriteLine("at index {0}", i);
            Console.Error.WriteLine(t1[i]);
            Console.Error.WriteLine(t2[i]);
            break;
        }
    }
}
/// <summary>
/// Demonstrates the LALR(1) parser: builds a lexer and grammar from the
/// same .pck file, tokenizes a fixed expression, and writes each parsed
/// reduction tree to the console until the document is exhausted.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void _RunLalr(string[] args)
{
    // We need both a lexer and a CfgDocument; both are read from the same file.
    var cfg = CfgDocument.ReadFrom(@"..\..\..\expr.pck");
    var lex = LexDocument.ReadFrom(@"..\..\..\expr.pck");
    // Create a runtime tokenizer over a fixed sample expression.
    var tokenizer = lex.ToTokenizer("3*(4+7)", cfg.EnumSymbols());
    // Create a parser (removed dead commented-out debug-parser variant;
    // a redundant trailing 'return;' was also dropped).
    var parser = cfg.ToLalr1Parser(tokenizer);
    parser.ShowHidden = true;
    while (LRNodeType.EndDocument != parser.NodeType)
    {
        Console.WriteLine(parser.ParseReductions(true));
    }
}
/// <summary>
/// Demonstrates the LL(1) parser: reads the grammar and lexer from the
/// expr.ll1.pck file, tokenizes a fixed expression, and dumps each parsed
/// subtree to the console until the end of the document.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void _RunLL(string[] args)
{
    var grammar = CfgDocument.ReadFrom(@"..\..\..\expr.ll1.pck");
    var lexerSpec = LexDocument.ReadFrom(@"..\..\..\expr.ll1.pck");
    // Tokenize a fixed sample expression against the grammar's symbol table.
    var tokenizer = lexerSpec.ToTokenizer("3+4*(2+1+1)" /*new FileReaderEnumerable(@"..\..\..\xbnf.xbnf")*/, grammar.FillSymbols());
    var parser = grammar.ToLL1Parser(tokenizer); //new LL1DebugParser(cfg,tokenizer);
    parser.ShowHidden = true;
    // Emit one subtree per iteration until the parser signals end-of-document.
    while (LLNodeType.EndDocument != parser.NodeType)
        Console.WriteLine(parser.ParseSubtree(true));
}
/// <summary>
/// Demonstrates the code-generated xbnf tokenizer and parser: streams the
/// xbnf.xbnf grammar file through XbnfTokenizer/XbnfParser and writes each
/// parsed reduction tree to the console.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void _RunXbnfGenerated(string[] args)
{
    // Removed the unused CfgDocument/LexDocument reads: this variant uses the
    // pre-generated XbnfTokenizer/XbnfParser directly, so loading xbnf.pck
    // only cost file I/O and was never referenced.
    var tokenizer = new XbnfTokenizer(new FileReaderEnumerable(@"..\..\..\xbnf.xbnf"));
    var parser = new XbnfParser(tokenizer);
    parser.ShowHidden = true;
    while (LRNodeType.EndDocument != parser.NodeType)
    {
        Console.WriteLine(parser.ParseReductions(true));
    }
}
/// <summary>
/// Demonstrates the runtime (table-driven) LALR(1) parser over the xbnf
/// grammar: builds the tokenizer and parser from xbnf.pck, streams the
/// xbnf.xbnf file, and writes each parsed reduction tree to the console.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void _RunLalrXbnf(string[] args)
{
    var grammar = CfgDocument.ReadFrom(@"..\..\..\xbnf.pck");
    var lexerSpec = LexDocument.ReadFrom(@"..\..\..\xbnf.pck");
    // Stream the grammar source file through the runtime tokenizer.
    var tokenizer = lexerSpec.ToTokenizer(new FileReaderEnumerable(@"..\..\..\xbnf.xbnf"), grammar.EnumSymbols());
    //var pt = cfg.ToLalr1ParseTable();// new _ConsoleProgress());
    var parser = grammar.ToLalr1Parser(tokenizer); //new Lalr1DebugParser(cfg, tokenizer, pt);
    parser.ShowHidden = true;
    // Emit one reduction tree per iteration until end-of-document is reached.
    while (LRNodeType.EndDocument != parser.NodeType)
        Console.WriteLine(parser.ParseReductions(true));
}