public void Test()
{
    // Read this test's own source file, lex it, and log every token as a JSON line.
    string code = File.ReadAllText($@"..\..\..\{GetType().Name}.cs");
    var tokens = MyLexer.Parse(code);
    var writer = LogManager.GetCurrentClassLogger();
    foreach (var t in tokens)
    {
        writer.Info(JsonConvert.SerializeObject(t));
    }
}
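// Illustrative only: assuming MyToken serializes its public properties, each
// logged line would look roughly like the following (values are hypothetical,
// not taken from an actual run):
//
//   {"Type":5,"StartIndex":0,"StopIndex":5,"Line":1,"Column":0,"Channel":0,"Text":"public"}
//
// These are the same per-token fields that the Read converter below restores,
// except Text, which it recovers from the input text by offset.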
public override ParsingResultSet Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
    MyTokenStream out_token_stream = new MyTokenStream();
    MyLexer lexer = new MyLexer(null);
    MyParser parser = new MyParser(out_token_stream);
    MyCharStream fake_char_stream = new MyCharStream();
    string text = null;
    lexer.InputStream = fake_char_stream;
    if (reader.TokenType != JsonTokenType.StartObject)
    {
        throw new JsonException();
    }
    reader.Read();

    List<string> mode_names = new List<string>();
    List<string> channel_names = new List<string>();
    List<string> lexer_rule_names = new List<string>();
    List<string> literal_names = new List<string>();
    List<string> symbolic_names = new List<string>();
    Dictionary<string, int> token_type_map = new Dictionary<string, int>();
    List<string> parser_rule_names = new List<string>();
    Dictionary<int, IParseTree> nodes = new Dictionary<int, IParseTree>();
    List<IParseTree> result = new List<IParseTree>();

    while (reader.TokenType == JsonTokenType.PropertyName)
    {
        string pn = reader.GetString();
        reader.Read();
        if (pn == "FileName")
        {
            var name = reader.GetString();
            fake_char_stream.SourceName = name;
            reader.Read();
        }
        else if (pn == "Text")
        {
            // The entire input text. Token text is recovered from it by offset,
            // so tokens do not need to carry their own strings.
            out_token_stream.Text = reader.GetString();
            fake_char_stream.Text = out_token_stream.Text;
            text = out_token_stream.Text;
            reader.Read();
        }
        else if (pn == "Tokens")
        {
            // Tokens are stored as a flat array of six integers per token:
            // type, start index, stop index, line, column, channel.
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            int token_index = 0;
            while (reader.TokenType == JsonTokenType.Number)
            {
                var type = reader.GetInt32(); reader.Read();
                var start = reader.GetInt32(); reader.Read();
                var stop = reader.GetInt32(); reader.Read();
                var line = reader.GetInt32(); reader.Read();
                var column = reader.GetInt32(); reader.Read();
                var channel = reader.GetInt32(); reader.Read();
                var token = new MyToken();
                token.Type = type;
                token.StartIndex = start;
                token.StopIndex = stop;
                token.Line = line;
                token.Column = column;
                token.Channel = channel;
                token.InputStream = lexer.InputStream;
                token.TokenSource = lexer;
                token.TokenIndex = token_index++;
                token.Text = out_token_stream.Text.Substring(token.StartIndex, token.StopIndex - token.StartIndex + 1);
                out_token_stream.Add(token);
            }
            reader.Read(); // consume EndArray
        }
        else if (pn == "ModeNames")
        {
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                mode_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // consume EndArray
            lexer._modeNames = mode_names.ToArray();
        }
        else if (pn == "ChannelNames")
        {
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String)
            {
                channel_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // consume EndArray
            lexer._channelNames = channel_names.ToArray();
        }
        else if (pn == "LiteralNames")
        {
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                literal_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // consume EndArray
        }
        else if (pn == "SymbolicNames")
        {
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                symbolic_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // consume EndArray
        }
        else if (pn == "LexerRuleNames")
        {
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                lexer_rule_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // consume EndArray
        }
        else if (pn == "ParserRuleNames")
        {
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                parser_rule_names.Add(reader.GetString());
                reader.Read();
            }
            reader.Read(); // consume EndArray
        }
        else if (pn == "TokenTypeMap")
        {
            // The map is flattened into an array of alternating
            // name / type-number pairs.
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            while (reader.TokenType == JsonTokenType.String || reader.TokenType == JsonTokenType.Null)
            {
                var name = reader.GetString();
                reader.Read();
                var tt = reader.GetInt32();
                reader.Read();
                token_type_map[name] = tt;
            }
            reader.Read(); // consume EndArray
        }
        else if (pn == "Nodes")
        {
            // The parse trees are flattened pre-order into pairs of integers:
            // (parent id, node type). Ids are implicit and 1-based: the first
            // pair describes node 1, the second node 2, and so on. A parent id
            // of 0 marks a root. Types below 1000000 are parser rule indices;
            // types at or above 1000000 are terminals, encoded as 1000000 plus
            // the token's index in the token stream.
            if (reader.TokenType != JsonTokenType.StartArray)
            {
                throw new JsonException();
            }
            reader.Read();
            int current = 1;
            while (reader.TokenType == JsonTokenType.Number)
            {
                int parent = reader.GetInt32(); reader.Read();
                int type_of_node = reader.GetInt32(); reader.Read();
                var parent_node = parent > 0 ? nodes[parent] as MyParserRuleContext : null;
                if (type_of_node < 1000000)
                {
                    // Internal node: a parser rule context.
                    MyParserRuleContext foo = new MyParserRuleContext(parent_node, 0) { _ruleIndex = type_of_node };
                    nodes[current] = foo;
                    if (parent_node == null)
                    {
                        result.Add(foo);
                    }
                    else
                    {
                        parent_node.AddChild((Antlr4.Runtime.RuleContext)foo);
                    }
                }
                else
                {
                    // Leaf node: a terminal wrapping a token from the stream.
                    var index = type_of_node - 1000000;
                    var symbol = out_token_stream.Get(index);
                    var foo = new MyTerminalNodeImpl(symbol);
                    nodes[current] = foo;
                    foo.Parent = parent_node;
                    if (parent_node == null)
                    {
                        result.Add(foo);
                    }
                    else
                    {
                        parent_node.AddChild(foo);
                    }
                }
                current++;
            }
            foreach (var n in result)
            {
                Sweep(n);
            }
            reader.Read(); // consume EndArray
        }
        else
        {
            throw new JsonException();
        }
    }

    // Rebuild the vocabulary and hand the collected names back to the lexer
    // and parser so that rule and token names resolve as usual.
    var vocab = new Vocabulary(literal_names.ToArray(), symbolic_names.ToArray());
    parser._vocabulary = vocab;
    parser._grammarFileName = fake_char_stream.SourceName;
    parser._ruleNames = parser_rule_names.ToArray();
    lexer._vocabulary = vocab;
    lexer._ruleNames = lexer_rule_names.ToArray();
    lexer._tokenTypeMap = token_type_map;
    var res = new AntlrJson.ParsingResultSet()
    {
        FileName = fake_char_stream.SourceName,
        Stream = out_token_stream,
        Nodes = result.ToArray(),
        Lexer = lexer,
        Parser = parser,
        Text = text
    };
    return res;
}
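// A minimal usage sketch, not part of the original source. It assumes this
// Read override lives in a JsonConverter<ParsingResultSet> subclass named
// ParsingResultSetConverter (a hypothetical name), that "parse.json" was
// produced by the converter's matching Write method, and that MyParser
// derives from Antlr4.Runtime.Parser, as the fields assigned above suggest.
public static void RoundTripExample()
{
    var options = new System.Text.Json.JsonSerializerOptions();
    options.Converters.Add(new ParsingResultSetConverter());
    var json = System.IO.File.ReadAllText("parse.json");
    var result = System.Text.Json.JsonSerializer.Deserialize<ParsingResultSet>(json, options);

    // The deserialized set carries the token stream, the reconstructed parse
    // trees, and enough lexer/parser state for rule and token names to resolve.
    foreach (var tree in result.Nodes)
    {
        System.Console.WriteLine(tree.ToStringTree(result.Parser));
    }
}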