/// <summary>
/// Computes a Zhang-Shasha edit script between the parser-rule use trees of
/// two grammars. Converts each parse tree to a graph of rule symbols, then
/// derives a tree of uses from the start rules.
/// NOTE(review): still a stub — ends in NotImplementedException after the
/// postorder sort of the first graph.
/// </summary>
/// <param name="t1">Parse tree of the first grammar.</param>
/// <param name="t2">Parse tree of the second grammar.</param>
/// <returns>The list of edit operations (not yet implemented).</returns>
public List<ZhangShashaCSharp.Operation> Main(IParseTree t1, IParseTree t2)
{
    // Build a rule table for each input parse tree.
    var table1 = new Transform.TableOfRules();
    table1.ReadRules(t1);
    table1.FindPartitions();
    table1.FindStartRules();

    var table2 = new Transform.TableOfRules();
    // BUG FIX: was table2.ReadRules(t1) — the second table must be built
    // from t2, otherwise both graphs describe the same grammar and every
    // comparison is trivially empty.
    table2.ReadRules(t2);
    table2.FindPartitions();
    table2.FindStartRules();

    // Build the parser-rule reference graph and start-rule list for each
    // table. (BuildRuleGraph replaces the former duplicated loop triple.)
    List<string> starts1;
    Digraph<string> g1 = BuildRuleGraph(table1, out starts1);
    List<string> starts2;
    Digraph<string> g2 = BuildRuleGraph(table2, out starts2);

    var map1 = new Dictionary<string, Node>();
    var po1 = Algorithms.Postorder.Sort(g1, starts1).ToList();
    throw new NotImplementedException();
}

/// <summary>
/// Constructs the directed graph of parser-rule references for one rule
/// table. Vertices are parser-rule LHS names; an edge LHS -> rhs exists for
/// each parser-rule symbol appearing on a rule's RHS.
/// </summary>
/// <param name="table">Populated rule table (ReadRules already called).</param>
/// <param name="starts">Receives the LHS names of the table's start rules.</param>
/// <returns>The rule-reference digraph.</returns>
private static Digraph<string> BuildRuleGraph(Transform.TableOfRules table, out List<string> starts)
{
    Digraph<string> graph = new Digraph<string>();
    foreach (TableOfRules.Row r in table.rules)
    {
        if (!r.is_parser_rule)
        {
            continue;
        }
        graph.AddVertex(r.LHS);
    }
    foreach (TableOfRules.Row r in table.rules)
    {
        if (!r.is_parser_rule)
        {
            continue;
        }
        foreach (string rhs in r.RHS)
        {
            // NOTE(review): when rhs does not match any row (e.g. a token
            // reference), FirstOrDefault yields the type default — this
            // assumes Row is a struct (or that the lookup always succeeds);
            // a null reference Row would throw here. TODO confirm.
            TableOfRules.Row sym = table.rules.Where(t => t.LHS == rhs).FirstOrDefault();
            if (!sym.is_parser_rule)
            {
                continue;
            }
            graph.AddEdge(new DirectedEdge<string>(r.LHS, rhs));
        }
    }
    starts = new List<string>();
    foreach (TableOfRules.Row r in table.rules)
    {
        if (r.is_parser_rule && r.is_start)
        {
            starts.Add(r.LHS);
        }
    }
    return graph;
}
/// <summary>
/// Computes the strongly-connected components (cycles) of the parser-rule
/// reference graph for the grammar containing <paramref name="document"/>,
/// and formats any non-trivial SCCs into a report.
/// NOTE(review): the report StringBuilder is built but never emitted or
/// returned — TODO confirm where the output is supposed to go.
/// </summary>
/// <param name="pos">Caret offset used to locate the rule under the cursor.</param>
/// <param name="document">The grammar document to analyze.</param>
/// <exception cref="LanguageServerException">Thrown when the document is not a grammar.</exception>
public static void ShowCycles(int pos, Document document)
{
    // Check that the selected file is actually a grammar.
    ParsingResults pd_parser = ParsingResultsFactory.Create(document) as ParsingResults;
    if (pd_parser == null)
    {
        throw new LanguageServerException("A grammar file is not selected. Please select one first.");
    }
    Transform.ExtractGrammarType egt = new Transform.ExtractGrammarType();
    ParseTreeWalker.Default.Walk(egt, pd_parser.ParseTree);
    bool is_grammar = egt.Type == Transform.ExtractGrammarType.GrammarType.Parser
        || egt.Type == Transform.ExtractGrammarType.GrammarType.Combined
        || egt.Type == Transform.ExtractGrammarType.GrammarType.Lexer;
    if (!is_grammar)
    {
        throw new LanguageServerException("A grammar file is not selected. Please select one first.");
    }

    // Find all other grammars by walking dependencies (import, vocab, file
    // names): iterate to a fixed point over both directions of
    // InverseImports. Reassigning read_files inside the foreach is safe —
    // the loop enumerates the set object captured at loop entry, and
    // Union(...).ToHashSet() always builds a new set.
    HashSet<string> read_files = new HashSet<string> { document.FullPath };
    for (; ; )
    {
        int before_count = read_files.Count;
        foreach (string f in read_files)
        {
            List<string> additional = ParsingResults.InverseImports
                .Where(t => t.Value.Contains(f))
                .Select(t => t.Key)
                .ToList();
            read_files = read_files.Union(additional).ToHashSet();
        }
        foreach (string f in read_files)
        {
            var additional = ParsingResults.InverseImports
                .Where(t => t.Key == f)
                .Select(t => t.Value);
            foreach (var t in additional)
            {
                read_files = read_files.Union(t).ToHashSet();
            }
        }
        if (read_files.Count == before_count)
        {
            break;
        }
    }

    // Construct the graph of parser-rule symbol usage: one vertex per
    // parser rule, one edge per parser-rule reference on a RHS.
    Transform.TableOfRules table = new Transform.TableOfRules(pd_parser, document);
    table.ReadRules();
    table.FindPartitions();
    table.FindStartRules();
    Digraph<string> graph = new Digraph<string>();
    foreach (Transform.TableOfRules.Row r in table.rules)
    {
        if (!r.is_parser_rule)
        {
            continue;
        }
        graph.AddVertex(r.LHS);
    }
    foreach (Transform.TableOfRules.Row r in table.rules)
    {
        if (!r.is_parser_rule)
        {
            continue;
        }
        foreach (string rhs in r.RHS)
        {
            // NOTE(review): FirstOrDefault yields the type default when rhs
            // is not a rule LHS (e.g. a token) — assumes Row is a struct or
            // the lookup always succeeds; a null Row would throw. TODO confirm.
            Transform.TableOfRules.Row sym = table.rules.Where(t => t.LHS == rhs).FirstOrDefault();
            if (!sym.is_parser_rule)
            {
                continue;
            }
            graph.AddEdge(new DirectedEdge<string>(r.LHS, rhs));
        }
    }
    List<string> starts = new List<string>();
    foreach (Transform.TableOfRules.Row r in table.rules)
    {
        if (r.is_parser_rule && r.is_start)
        {
            starts.Add(r.LHS);
        }
    }

    // Locate the rule spec whose token span covers the caret position.
    IParseTree rule = pd_parser.AllNodes.Where(n =>
    {
        if (!(n is ANTLRv4Parser.ParserRuleSpecContext || n is ANTLRv4Parser.LexerRuleSpecContext))
        {
            return false;
        }
        Interval source_interval = n.SourceInterval;
        int a = source_interval.a;
        int b = source_interval.b;
        IToken ta = pd_parser.TokStream.Get(a);
        IToken tb = pd_parser.TokStream.Get(b);
        var start = ta.StartIndex;
        var stop = tb.StopIndex + 1;
        return start <= pos && pos < stop;
    }).FirstOrDefault();
    // BUG FIX: the previous hard cast threw InvalidCastException when the
    // caret sat in a lexer rule (admitted by the filter above) and a
    // NullReferenceException downstream when no rule matched at all; a safe
    // cast yields null in both cases instead.
    var k = rule as ANTLRv4Parser.ParserRuleSpecContext;

    // Compute SCCs and report every non-trivial one (size > 1) once.
    var tarjan = new TarjanSCC<string, DirectedEdge<string>>(graph);
    var sccs = tarjan.Compute();
    StringBuilder sb = new StringBuilder();
    sb.AppendLine("Cycles in " + document.FullPath);
    var done = new List<IEnumerable<string>>();
    foreach (var scc in sccs)
    {
        if (scc.Value.Count() <= 1)
        {
            continue;
        }
        // NOTE(review): Contains uses reference equality here — dedup only
        // works if members of one SCC share the same collection instance.
        // TODO confirm TarjanSCC returns shared references per component.
        if (!done.Contains(scc.Value))
        {
            foreach (var s in scc.Value)
            {
                sb.Append(" ");
                sb.Append(s);
            }
            sb.AppendLine();
            sb.AppendLine();
            done.Add(scc.Value);
        }
    }
    //var scc = sccs[k.RULE_REF().ToString()];
    //foreach (var v in scc)
    //{
    //    ordered.Add(v);
    //}
}