/// <summary>
/// Implements the "xgrep" repl command: evaluates an XPath2 expression against
/// one or more parse trees and pushes the matching subtrees, serialized as a
/// ParsingResultSet, onto the repl's I/O stack for the next piped command.
/// </summary>
/// <param name="repl">Repl whose stacks supply input and receive output.</param>
/// <param name="tree">Parsed command; its arg() holds the XPath expression.</param>
/// <param name="piped">True when the parse state comes from the previous piped
/// command rather than from the document on the repl stack.</param>
public void Execute(Repl repl, ReplParser.XgrepContext tree, bool piped)
{
    var expr = repl.GetArg(tree.arg());
    IParseTree[] atrees;
    Parser parser;
    Lexer lexer;
    string text;
    string fn;
    ITokenStream tokstream;
    if (piped)
    {
        // Parse state arrives as JSON produced by the previous command in the pipe.
        var lines = repl.input_output_stack.Pop();
        var serializeOptions = new JsonSerializerOptions();
        serializeOptions.Converters.Add(new AntlrJson.ParseTreeConverter());
        serializeOptions.WriteIndented = false;
        var parse_info = JsonSerializer.Deserialize<AntlrJson.ParsingResultSet>(lines, serializeOptions);
        text = parse_info.Text;
        fn = parse_info.FileName;
        atrees = parse_info.Nodes;
        parser = parse_info.Parser;
        lexer = parse_info.Lexer;
        tokstream = parse_info.Stream;
    }
    else
    {
        // No pipe: parse the document currently on top of the repl stack.
        var doc = repl.stack.Peek();
        var pr = ParsingResultsFactory.Create(doc);
        parser = pr.Parser;
        lexer = pr.Lexer;
        text = pr.Code;
        fn = pr.FullFileName;
        tokstream = pr.TokStream;
        IParseTree atree = pr.ParseTree;
        atrees = new IParseTree[] { atree };
    }
    var engine = new org.eclipse.wst.xml.xpath2.processor.Engine();
    IParseTree root = atrees.First().Root();
    var ate = new AntlrTreeEditing.AntlrDOM.ConvertToDOM();
    using (AntlrTreeEditing.AntlrDOM.AntlrDynamicContext dynamicContext = ate.Try(root, parser))
    {
        var domNodes = atrees.Select(t => ate.FindDomNode(t));
        // Direct cast instead of "as": a non-AntlrElement XPath result now throws
        // an InvalidCastException naming the offending type instead of an opaque
        // NullReferenceException on the immediate dereference.
        var nodes = engine.parseExpression(expr, new StaticContextBuilder())
            .evaluate(dynamicContext, domNodes.ToArray())
            .Select(x => ((AntlrTreeEditing.AntlrDOM.AntlrElement)x.NativeValue).AntlrIParseTree)
            .ToArray();
        // Re-serialize the selected subtrees so the next command in the pipe can
        // consume the same ParsingResultSet shape it would get from a parse.
        var serializeOptions = new JsonSerializerOptions();
        serializeOptions.Converters.Add(new AntlrJson.ParseTreeConverter());
        serializeOptions.WriteIndented = false;
        var parse_info_out = new AntlrJson.ParsingResultSet()
        {
            Text = text,
            FileName = fn,
            Lexer = lexer,
            Parser = parser,
            Stream = tokstream,
            Nodes = nodes
        };
        string js1 = JsonSerializer.Serialize(parse_info_out, serializeOptions);
        repl.input_output_stack.Push(js1);
    }
}
/// <summary>
/// Loads the generated parser assembly ("Generated/bin/Debug/netcoreapp3.1/Test.dll"
/// relative to the current directory), reflectively invokes the static
/// Easy.Program.Parse method on <paramref name="input"/> (or, when input is null,
/// on the value popped from the repl's I/O stack), reports timing to stderr, and
/// pushes the serialized ParsingResultSet onto the repl's I/O stack.
/// </summary>
/// <param name="repl">Repl whose I/O stack supplies fallback input and receives output.</param>
/// <param name="input">Text to parse, or null to consume the piped value.</param>
public void Run(Repl repl, string input)
{
    // Path.Combine instead of hand-built separators (original mixed
    // Path.DirectorySeparatorChar with hard-coded '/').
    var path = Path.Combine(Environment.CurrentDirectory, "Generated");
    var old = Environment.CurrentDirectory;
    try
    {
        Environment.CurrentDirectory = path;
        Assembly asm = Assembly.LoadFile(Path.Combine(path, "bin", "Debug", "netcoreapp3.1", "Test.dll"));
        // Fail with a clear message instead of a NullReferenceException if the
        // generated assembly does not have the expected driver type.
        Type type = asm.GetType("Easy.Program");
        if (type == null)
        {
            throw new InvalidOperationException("Test.dll does not contain the expected type Easy.Program.");
        }
        MethodInfo methodInfo = type.GetMethod("Parse");
        // When not given explicit input, consume the piped value from the repl stack.
        string txt = input ?? repl.input_output_stack.Pop();
        object[] parm = new object[] { txt };
        // Stopwatch is the correct tool for elapsed-time measurement; DateTime.Now
        // is wall-clock time and can jump (DST, NTP adjustments).
        var timer = System.Diagnostics.Stopwatch.StartNew();
        var res = methodInfo.Invoke(null, parm);
        timer.Stop();
        System.Console.Error.WriteLine("Time to parse: " + timer.Elapsed);
        var t2 = res as ParserRuleContext;
        // The generated driver exposes its runtime objects through static properties.
        var r2 = type.GetProperty("Parser").GetValue(null, new object[0]);
        var r3 = type.GetProperty("Lexer").GetValue(null, new object[0]);
        var r4 = type.GetProperty("TokenStream").GetValue(null, new object[0]);
        System.Console.Error.WriteLine("# tokens per sec = "
            + (r4 as ITokenStream).Size / timer.Elapsed.TotalSeconds);
        // Serialize the full parsing result set and push it for downstream commands.
        var serializeOptions = new JsonSerializerOptions();
        serializeOptions.Converters.Add(new AntlrJson.ParseTreeConverter());
        serializeOptions.WriteIndented = false;
        var tuple = new AntlrJson.ParsingResultSet()
        {
            Text = txt,
            FileName = "stdin",
            Stream = r4 as ITokenStream,
            Nodes = new IParseTree[] { t2 },
            Parser = r2 as Parser,
            Lexer = r3 as Lexer
        };
        string js1 = JsonSerializer.Serialize(tuple, serializeOptions);
        repl.input_output_stack.Push(js1);
    }
    finally
    {
        // Always restore the working directory, even on failure (the original also
        // restored it mid-try; the finally alone is sufficient).
        Environment.CurrentDirectory = old;
    }
}
/// <summary>
/// Deserializes a ParsingResultSet from JSON: reconstructs a token stream,
/// fake lexer/parser, and parse-tree forest from the serialized properties.
/// The JSON object is consumed with a hand-rolled Utf8JsonReader loop; each
/// branch below reads exactly one property value and leaves the reader on the
/// next property name (or EndObject).
/// </summary>
/// <param name="reader">Reader positioned at the StartObject of the serialized set.</param>
/// <param name="typeToConvert">Unused; required by the JsonConverter contract.</param>
/// <param name="options">Unused; required by the JsonConverter contract.</param>
/// <returns>The reconstructed ParsingResultSet.</returns>
/// <exception cref="JsonException">Malformed input or an unknown property name.</exception>
public override ParsingResultSet Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
    // Stand-in runtime objects: the real lexer/parser cannot be rebuilt from
    // JSON, so "My*" fakes are populated with just enough state (name tables,
    // vocabulary, tokens) for downstream consumers.
    MyTokenStream out_token_stream = new MyTokenStream();
    MyLexer lexer = new MyLexer(null);
    MyParser parser = new MyParser(out_token_stream);
    MyCharStream fake_char_stream = new MyCharStream();
    string text = null;
    // Tokens created below point at this stream via lexer.InputStream.
    lexer.InputStream = fake_char_stream;
    if (!(reader.TokenType == JsonTokenType.StartObject)) { throw new JsonException(); }
    reader.Read();
    // Accumulators for the name tables and tree nodes found in the JSON object.
    List<string> mode_names = new List<string>();
    List<string> channel_names = new List<string>();
    List<string> lexer_rule_names = new List<string>();
    List<string> literal_names = new List<string>();
    List<string> symbolic_names = new List<string>();
    Dictionary<string, int> token_type_map = new Dictionary<string, int>();
    List<string> parser_rule_names = new List<string>();
    // nodes: 1-based order-of-appearance index -> reconstructed tree node.
    Dictionary<int, IParseTree> nodes = new Dictionary<int, IParseTree>();
    // result: roots of the reconstructed forest (nodes with no parent).
    List<IParseTree> result = new List<IParseTree>();
    while (reader.TokenType == JsonTokenType.PropertyName)
    {
        string pn = reader.GetString();
        reader.Read();
        if (pn == "FileName")
        {
            var name = reader.GetString();
            fake_char_stream.SourceName = name;
            reader.Read();
        }
        else if (pn == "Text")
        {
            // Full source text, shared by the token stream and the char stream;
            // token text is later recovered by slicing this string.
            out_token_stream.Text = reader.GetString();
            fake_char_stream.Text = out_token_stream.Text;
            text = out_token_stream.Text;
            reader.Read();
        }
        else if (pn == "Tokens")
        {
            // Flat number array: six numbers per token, in the order
            // (type, start, stop, line, column, channel).
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            int token_index = 0;
            while (reader.TokenType == JsonTokenType.Number)
            {
                var type = reader.GetInt32();
                reader.Read();
                var start = reader.GetInt32();
                reader.Read();
                var stop = reader.GetInt32();
                reader.Read();
                var line = reader.GetInt32();
                reader.Read();
                var column = reader.GetInt32();
                reader.Read();
                var channel = reader.GetInt32();
                reader.Read();
                var token = new MyToken();
                token.Type = type;
                token.StartIndex = start;
                token.StopIndex = stop;
                token.Line = line;
                token.Column = column;
                token.Channel = channel;
                token.InputStream = lexer.InputStream;
                token.TokenSource = lexer;
                token.TokenIndex = token_index++;
                // Token text is not serialized; slice it out of the source text
                // (StopIndex is inclusive, hence the +1).
                // NOTE(review): assumes the "Text" property was read before
                // "Tokens" -- if Tokens arrived first, out_token_stream.Text
                // would still be null here. Confirm the writer's property order.
                token.Text = out_token_stream.Text.Substring(token.StartIndex, token.StopIndex - token.StartIndex + 1);
                out_token_stream.Add(token);
            }
            reader.Read(); // step past the array terminator
        }
        else if (pn == "ModeNames")
        {
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                mode_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // step past the array terminator
            lexer._modeNames = mode_names.ToArray();
        }
        else if (pn == "ChannelNames")
        {
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            // NOTE(review): unlike the other name lists, this loop does not accept
            // Null entries -- confirm channel names can never be null on the Write side.
            while (reader.TokenType == JsonTokenType.String)
            {
                channel_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // step past the array terminator
            lexer._channelNames = channel_names.ToArray();
        }
        else if (pn == "LiteralNames")
        {
            // Half of the vocabulary; installed after the read loop completes.
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                literal_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // step past the array terminator
        }
        else if (pn == "SymbolicNames")
        {
            // Other half of the vocabulary; installed after the read loop completes.
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                symbolic_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // step past the array terminator
        }
        else if (pn == "LexerRuleNames")
        {
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                lexer_rule_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // step past the array terminator
        }
        else if (pn == "ParserRuleNames")
        {
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                var name = reader.GetString();
                parser_rule_names.Add(name);
                reader.Read();
            }
            reader.Read(); // step past the array terminator
        }
        else if (pn == "TokenTypeMap")
        {
            // Serialized as a flat array of alternating string key / integer value
            // pairs, not as a JSON object.
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                var name = reader.GetString();
                reader.Read();
                var tt = reader.GetInt32();
                reader.Read();
                token_type_map[name] = tt;
            }
            reader.Read(); // step past the array terminator
        }
        else if (pn == "Nodes")
        {
            // Flat encoding of the forest: pairs of (parent index, node kind).
            // Parent 0 means "root"; a nonzero parent refers to the 1-based index
            // of an earlier node, so parents always appear before their children.
            // Kind >= 1000000 encodes a terminal whose token index is
            // kind - 1000000; smaller kinds are parser rule indices.
            List<IParseTree> list_of_nodes = new List<IParseTree>(); // NOTE(review): never used.
            if (!(reader.TokenType == JsonTokenType.StartArray)) { throw new JsonException(); }
            reader.Read();
            int current = 1; // 1-based index assigned to each node in order of appearance
            while (reader.TokenType == JsonTokenType.Number)
            {
                int parent = reader.GetInt32();
                reader.Read();
                int type_of_node = reader.GetInt32();
                reader.Read();
                var parent_node = parent > 0 ? nodes[parent] as MyParserRuleContext : null;
                if (type_of_node < 1000000)
                {
                    // Interior node: a rule context carrying the serialized rule index.
                    MyParserRuleContext foo = new MyParserRuleContext(parent_node, 0) { _ruleIndex = type_of_node };
                    nodes[current] = foo;
                    if (parent_node == null) { result.Add(foo); } else { parent_node.AddChild((Antlr4.Runtime.RuleContext)foo); }
                }
                else
                {
                    // Leaf node: wraps a token already materialized by the "Tokens" branch.
                    var index = type_of_node - 1000000;
                    var symbol = out_token_stream.Get(index);
                    var foo = new MyTerminalNodeImpl(symbol);
                    nodes[current] = foo;
                    foo.Parent = parent_node;
                    if (parent_node == null) { result.Add(foo); } else { parent_node.AddChild(foo); }
                }
                current++;
            }
            // Sweep is defined elsewhere in this converter; presumably a post-pass
            // fix-up over each reconstructed tree -- TODO confirm its contract.
            foreach (var n in result)
            {
                Sweep(n);
            }
            reader.Read(); // step past the array terminator
        }
        else
        {
            // Unknown property: the format is closed, so fail loudly.
            throw new JsonException();
        }
    }
    // Install the collected name tables into the fake lexer/parser.
    var vocab = new Vocabulary(literal_names.ToArray(), symbolic_names.ToArray());
    parser._vocabulary = vocab;
    parser._grammarFileName = fake_char_stream.SourceName;
    parser._ruleNames = parser_rule_names.ToArray();
    lexer._vocabulary = vocab;
    lexer._ruleNames = lexer_rule_names.ToArray();
    lexer._tokenTypeMap = token_type_map;
    var res = new AntlrJson.ParsingResultSet()
    {
        FileName = fake_char_stream.SourceName,
        Stream = out_token_stream,
        Nodes = result.ToArray(),
        Lexer = lexer,
        Parser = parser,
        Text = text
    };
    return (res);
}