public static DocumentSymbol GetDocumentSymbol(int index, Document doc)
{
    var pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null) { return null; }
    Antlr4.Runtime.Tree.IParseTree pt = LanguageServer.Util.Find(index, doc);
    if (pt == null) { return default(DocumentSymbol); }
    var q = pt as Antlr4.Runtime.Tree.TerminalNodeImpl;
    // Guard against a non-terminal node before dereferencing it.
    if (q == null) { return null; }
    var found = pd.Tags.TryGetValue(q, out int tag_type);
    if (!found) { return null; }
    if (q.Symbol == null) { return null; }
    return new DocumentSymbol()
    {
        name = q.Symbol.Text,
        range = new Workspaces.Range(q.Symbol.StartIndex, q.Symbol.StopIndex),
        kind = tag_type
    };
}
public static int GetTag(int index, Document doc)
{
    var pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null) { return -1; }
    Antlr4.Runtime.Tree.IParseTree pt = LanguageServer.Util.Find(index, doc);
    if (pt == null) { return -1; }
    var q = pt as Antlr4.Runtime.Tree.TerminalNodeImpl;
    // Guard against a non-terminal node before dereferencing it.
    if (q == null) { return -1; }
    var found = pd.Tags.TryGetValue(q, out int tag_type);
    if (found) { return tag_type; }
    if (q.Symbol == null) { return -1; }
    var found2 = pd.Comments.TryGetValue(q.Symbol, out int tag2);
    if (found2) { return tag2; }
    return -1;
}
public static List<string> Completion(int index, Document document)
{
    var ref_pd = ParserDetailsFactory.Create(document);
    // Return the computed candidates; the original discarded them and
    // always returned an empty list.
    var result = ref_pd.Candidates(index);
    return result;
}
public static (int, int) GetLineColumn(int index, Document doc)
{
    var pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null) { return (0, 0); }
    var buffer = pd.Code;
    if (buffer == null) { return (0, 0); }
    int cur_index = 0;
    int cur_line = 0; // zero based LSP.
    int cur_col = 0;  // zero based LSP.
    while (cur_index < buffer.Length && cur_index < index)
    {
        var ch = buffer[cur_index];
        if (ch == '\r')
        {
            // "\r\n" counts as one line break; a lone '\r' also ends a line.
            // The original only advanced past "\r\n" and looped forever on a bare '\r'.
            cur_line++;
            cur_col = 0;
            cur_index += (cur_index + 1 < buffer.Length && buffer[cur_index + 1] == '\n') ? 2 : 1;
        }
        else if (ch == '\n')
        {
            cur_line++;
            cur_col = 0;
            cur_index += 1;
        }
        else
        {
            cur_col += 1;
            cur_index += 1;
        }
    }
    return (cur_line, cur_col);
}
public static int GetIndex(int line, int column, Document doc)
{
    var pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null) { return 0; }
    var buffer = pd.Code;
    if (buffer == null) { return 0; }
    int index = 0;
    int cur_line = 0;
    int cur_col = 0;
    // Check bounds before reading; the original indexed into the buffer first,
    // which throws on an empty buffer.
    while (index < buffer.Length)
    {
        if (cur_line > line) { break; }
        if (cur_line >= line && cur_col >= column) { break; }
        var ch = buffer[index];
        if (ch == '\r')
        {
            // "\r\n" counts as one line break; a lone '\r' also ends a line.
            cur_line++;
            cur_col = 0;
            index += (index + 1 < buffer.Length && buffer[index + 1] == '\n') ? 2 : 1;
        }
        else if (ch == '\n')
        {
            cur_line++;
            cur_col = 0;
            index += 1;
        }
        else
        {
            cur_col += 1;
            index += 1;
        }
    }
    return index;
}
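// Usage sketch (an assumption, not part of the original sources): GetLineColumn and
// GetIndex invert each other over the same buffer, so a character offset can be
// round-tripped through a zero-based LSP (line, column) pair. The offset and file
// path below are hypothetical.
public static void LineColumnRoundTripExample()
{
    Document doc = Workspaces.Workspace.Instance.FindDocument(@"C:\grammars\Expr.g4");
    if (doc == null || doc.Code == null) { return; }
    (int line, int col) = GetLineColumn(57, doc);
    int offset = GetIndex(line, col, doc);
    // Holds for in-range offsets that do not land in the middle of a "\r\n" pair.
    System.Diagnostics.Debug.Assert(offset == 57);
}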
public static IEnumerable<Workspaces.Range> GetErrors(Workspaces.Range range, Document doc)
{
    ParserDetails pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    List<Workspaces.Range> result = new List<Workspaces.Range>();
    foreach (IParseTree p in pd.Errors)
    {
        ErrorNodeImpl q = p as Antlr4.Runtime.Tree.ErrorNodeImpl;
        if (q == null) { continue; }
        if (q.Payload == null) { continue; }
        int y = q.Payload.StartIndex;
        int z = q.Payload.StopIndex;
        if (y < 0) { y = 0; }
        if (z < 0) { z = 0; }
        int start_token_start = y;
        int end_token_end = z + 1;
        if (start_token_start > range.End.Value) { continue; }
        if (end_token_end < range.Start.Value) { continue; }
        Workspaces.Range r = new Workspaces.Range(new Workspaces.Index(start_token_start), new Workspaces.Index(end_token_end));
        result.Add(r);
    }
    return result;
}
public static Dictionary<string, string> RemoveUselessParserProductions(int pos, Document document)
{
    var result = new Dictionary<string, string>();
    // Check if lexer grammar.
    AntlrGrammarDetails pd_parser = ParserDetailsFactory.Create(document) as AntlrGrammarDetails;
    ExtractGrammarType lp = new ExtractGrammarType();
    ParseTreeWalker.Default.Walk(lp, pd_parser.ParseTree);
    var is_lexer = lp.Type == ExtractGrammarType.GrammarType.Lexer;
    if (is_lexer)
    {
        // We don't consider lexer grammars.
        return result;
    }
    // Consider only the target grammar.
    Table table = new Table(pd_parser, document);
    table.ReadRules();
    table.FindPartitions();
    table.FindStartRules();
    // Collect the spans of unused parser rules, in document order.
    List<Pair<int, int>> deletions = new List<Pair<int, int>>();
    foreach (var r in table.rules)
    {
        if (r.is_parser_rule && r.is_used == false)
        {
            deletions.Add(new Pair<int, int>(r.start_index, r.end_index));
        }
    }
    deletions = deletions.OrderBy(p => p.a).ThenBy(p => p.b).ToList();
    // Copy the old code, skipping each deleted span.
    StringBuilder sb = new StringBuilder();
    int previous = 0;
    string old_code = document.Code;
    foreach (var l in deletions)
    {
        sb.Append(old_code.Substring(previous, l.a - previous));
        previous = l.b;
    }
    sb.Append(old_code.Substring(previous));
    result.Add(document.FullPath, sb.ToString());
    return result;
}
public static IList<Location> FindDef(int index, Document doc)
{
    var result = new List<Location>();
    if (doc == null) { return result; }
    var ref_pt = Util.Find(index, doc);
    if (ref_pt == null) { return result; }
    var ref_pd = ParserDetailsFactory.Create(doc);
    ref_pd.Attributes.TryGetValue(ref_pt, out IList<Symtab.CombinedScopeSymbol> list_values);
    // TryGetValue leaves the out parameter null on a miss; guard before iterating.
    if (list_values == null) { return result; }
    foreach (var value in list_values)
    {
        if (value == null) { continue; }
        var @ref = value as Symtab.ISymbol;
        if (@ref == null) { continue; }
        var def = @ref.resolve();
        if (def == null) { continue; }
        var def_file = def.file;
        if (def_file == null) { continue; }
        var def_item = Workspaces.Workspace.Instance.FindDocument(def_file);
        if (def_item == null) { continue; }
        var new_loc = new Location()
        {
            Range = new Workspaces.Range(def.Token.StartIndex, def.Token.StopIndex),
            Uri = def_item
        };
        result.Add(new_loc);
    }
    return result;
}
public static List<string> Completion(int char_index, Document document)
{
    ParserDetails ref_pd = ParserDetailsFactory.Create(document);
    if (ref_pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    List<string> result = ref_pd.Candidates(char_index);
    return result;
}
public static IEnumerable<Workspaces.Range> GetErrors(Workspaces.Range range, Document doc)
{
    var pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null)
    {
        return new List<Workspaces.Range>();
    }
    var result = new List<Workspaces.Range>();
    foreach (var p in pd.Errors)
    {
        var q = p as Antlr4.Runtime.Tree.ErrorNodeImpl;
        if (q == null) { continue; }
        if (q.Payload == null) { continue; }
        var y = q.Payload.StartIndex;
        var z = q.Payload.StopIndex;
        if (y < 0) { y = 0; }
        if (z < 0) { z = 0; }
        int start_token_start = y;
        int end_token_end = z + 1;
        if (start_token_start > range.End.Value) { continue; }
        if (end_token_end < range.Start.Value) { continue; }
        var r = new Workspaces.Range(new Workspaces.Index(start_token_start), new Workspaces.Index(end_token_end));
        result.Add(r);
    }
    return result;
}
private static Digraph<ParserDetails> ConstructGraph(IEnumerable<ParserDetails> to_do)
{
    Digraph<ParserDetails> g = new Digraph<ParserDetails>();
    HashSet<ParserDetails> done = new HashSet<ParserDetails>();
    Stack<ParserDetails> stack = new Stack<ParserDetails>();
    foreach (ParserDetails f in to_do)
    {
        stack.Push(f);
    }
    // Worklist pass: add every grammar reachable through change propagation as a vertex.
    while (stack.Count > 0)
    {
        ParserDetails f = stack.Pop();
        g.AddVertex(f);
        done.Add(f);
        foreach (string d in f.PropagateChangesTo)
        {
            Document d_doc = Workspace.Instance.FindDocument(d);
            ParserDetails d_pd = ParserDetailsFactory.Create(d_doc);
            if (done.Contains(d_pd)) { continue; }
            stack.Push(d_pd);
        }
    }
    // Second pass: add an edge for every dependency between vertices.
    foreach (ParserDetails v in g.Vertices)
    {
        HashSet<string> deps = v.PropagateChangesTo;
        Document doc = Workspace.Instance.FindDocument(v.FullFileName);
        ParserDetails pd = ParserDetailsFactory.Create(doc);
        foreach (string d in deps)
        {
            Document d_doc = Workspace.Instance.FindDocument(d);
            ParserDetails d_pd = ParserDetailsFactory.Create(d_doc);
            g.AddEdge(new DirectedEdge<ParserDetails>(pd, d_pd));
        }
    }
    return g;
}
public static IEnumerable<DocumentSymbol> Get(Document doc)
{
    ParserDetails pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    List<DocumentSymbol> combined = new List<DocumentSymbol>();
    foreach (KeyValuePair<TerminalNodeImpl, int> p in pd.Tags)
    {
        if (p.Key.Symbol == null) { continue; }
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.Symbol.Text,
            range = new Workspaces.Range(p.Key.Symbol.StartIndex, p.Key.Symbol.StopIndex),
            kind = p.Value
        });
    }
    foreach (KeyValuePair<Antlr4.Runtime.IToken, int> p in pd.Comments)
    {
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.Text,
            range = new Workspaces.Range(p.Key.StartIndex, p.Key.StopIndex),
            kind = p.Value
        });
    }
    // Sort the list.
    IOrderedEnumerable<DocumentSymbol> sorted_combined_tokens =
        combined.OrderBy(t => t.range.Start.Value).ThenBy(t => t.range.End.Value);
    return sorted_combined_tokens;
}
public static IEnumerable<DocumentSymbol> Get(Document doc)
{
    var pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null)
    {
        return new List<DocumentSymbol>();
    }
    var combined = new List<DocumentSymbol>();
    foreach (var p in pd.Tags)
    {
        if (p.Key.Symbol == null) { continue; }
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.Symbol.Text,
            range = new Workspaces.Range(p.Key.Symbol.StartIndex, p.Key.Symbol.StopIndex),
            kind = p.Value
        });
    }
    foreach (var p in pd.Comments)
    {
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.Text,
            range = new Workspaces.Range(p.Key.StartIndex, p.Key.StopIndex),
            kind = p.Value
        });
    }
    // Sort the list.
    var sorted_combined_tokens = combined.OrderBy(t => t.range.Start.Value).ThenBy(t => t.range.End.Value);
    return sorted_combined_tokens;
}
public static IParseTree Find(int index, Document document)
{
    var pd = ParserDetailsFactory.Create(document);
    if (pd.ParseTree == null) { return null; }
    foreach (var node in DFSVisitor.DFS(pd.ParseTree as ParserRuleContext))
    {
        var leaf = node as TerminalNodeImpl;
        if (leaf == null) { continue; }
        if (leaf.Symbol.StartIndex <= index && index <= leaf.Symbol.StopIndex)
        {
            return leaf;
        }
    }
    return null;
}
public static IEnumerable<Location> GetDefs(Document doc)
{
    List<Location> result = new List<Location>();
    ParserDetails ref_pd = ParserDetailsFactory.Create(doc);
    if (ref_pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    foreach (KeyValuePair<TerminalNodeImpl, int> value in ref_pd.Defs)
    {
        TerminalNodeImpl key = value.Key;
        Antlr4.Runtime.IToken sym = key.Payload;
        result.Add(new Location()
        {
            Range = new Workspaces.Range(sym.StartIndex, sym.StopIndex),
            Uri = Workspaces.Workspace.Instance.FindDocument(sym.InputStream.SourceName)
        });
    }
    return result;
}
public static IParseTree Find(int index, Document document)
{
    ParserDetails pd = ParserDetailsFactory.Create(document);
    if (pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    foreach (IParseTree node in DFSVisitor.DFS(pd.ParseTree as ParserRuleContext))
    {
        TerminalNodeImpl leaf = node as TerminalNodeImpl;
        if (leaf == null) { continue; }
        if (leaf.Symbol.StartIndex <= index && index <= leaf.Symbol.StopIndex)
        {
            return leaf;
        }
    }
    return null;
}
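// Usage sketch (an assumption, not part of the original sources): Find returns the
// terminal leaf whose token span covers a caret offset, which the methods above use
// as the starting point for hover, goto-definition, and tagging queries. The offset
// and file path are made up for illustration.
public static void FindUsageExample()
{
    Document doc = Workspaces.Workspace.Instance.FindDocument(@"C:\grammars\Expr.g4");
    if (doc == null) { return; }
    var leaf = Find(100, doc) as TerminalNodeImpl;
    if (leaf != null)
    {
        System.Console.WriteLine($"'{leaf.Symbol.Text}' spans {leaf.Symbol.StartIndex}..{leaf.Symbol.StopIndex}");
    }
}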
public static IEnumerable<Location> FindRefsAndDefs(int index, Document doc)
{
    var result = new List<Location>();
    var ref_pt = Util.Find(index, doc);
    if (ref_pt == null) { return result; }
    var ref_pd = ParserDetailsFactory.Create(doc);
    ref_pd.Attributes.TryGetValue(ref_pt, out IList<Symtab.CombinedScopeSymbol> list_value);
    // TryGetValue leaves the out parameter null on a miss; guard before iterating.
    if (list_value == null) { return result; }
    foreach (var value in list_value)
    {
        if (value == null) { continue; }
        var @ref = value as Symtab.ISymbol;
        if (@ref == null) { continue; }
        if (@ref.Token == null) { continue; }
        var def = @ref.resolve();
        if (def == null) { continue; }
        if (def.Token == null) { continue; }
        // Collect all references in this file that resolve to the same definition.
        var refs = ref_pd.Refs.Where(
            (t) =>
            {
                Antlr4.Runtime.Tree.TerminalNodeImpl x = t.Key;
                // Compare the leaf's token, as in the cross-file variant of this
                // method; the original compared a tree node to a token, which is
                // never equal.
                if (x.Symbol == @ref.Token) { return true; }
                ref_pd.Attributes.TryGetValue(x, out IList<Symtab.CombinedScopeSymbol> list_v);
                if (list_v == null) { return false; }
                foreach (var v in list_v)
                {
                    var vv = v as Symtab.ISymbol;
                    if (vv == null) { return false; }
                    return vv.resolve() == def;
                }
                return false;
            }).Select(t => t.Key);
        if (def.file == @ref.file)
        {
            result.Add(new Location()
            {
                Range = new Workspaces.Range(def.Token.StartIndex, def.Token.StopIndex),
                Uri = Workspaces.Workspace.Instance.FindDocument(def.file)
            });
        }
        foreach (var r in refs)
        {
            result.Add(new Location()
            {
                Range = new Workspaces.Range(r.Symbol.StartIndex, r.Symbol.StopIndex),
                Uri = Workspaces.Workspace.Instance.FindDocument(r.Symbol.InputStream.SourceName)
            });
        }
    }
    return result;
}
public static IEnumerable<Location> FindRefsAndDefs(int index, Document doc)
{
    List<Location> result = new List<Location>();
    IParseTree ref_pt = Util.Find(index, doc);
    if (ref_pt == null) { return result; }
    ParserDetails ref_pd = ParserDetailsFactory.Create(doc);
    if (ref_pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    ref_pd.Attributes.TryGetValue(ref_pt, out IList<Symtab.CombinedScopeSymbol> list_value);
    if (list_value == null) { return result; }
    ISymbol found_def = null;
    ISymbol found_ref = null;
    foreach (CombinedScopeSymbol value in list_value)
    {
        if (value == null) { continue; }
        ISymbol @ref = value as Symtab.ISymbol;
        if (@ref == null) { continue; }
        if (@ref.Token == null) { continue; }
        found_ref = @ref;
        ISymbol def = @ref.resolve();
        if (def == null) { continue; }
        if (def.Token == null) { continue; }
        found_def = def;
        break;
    }
    // Nothing under the cursor resolved to a symbol; bail out before the
    // lambda below dereferences found_ref.
    if (found_ref == null) { return result; }
    // Go through all files and look for refs.
    foreach (KeyValuePair<string, List<string>> d in AntlrGrammarDetails._dependent_grammars)
    {
        Document d_doc = Workspaces.Workspace.Instance.FindDocument(d.Key);
        ParserDetails d_pd = ParserDetailsFactory.Create(d_doc);
        if (d_pd.ParseTree == null) { continue; }
        IEnumerable<TerminalNodeImpl> refs = d_pd.Refs.Where(
            (t) =>
            {
                Antlr4.Runtime.Tree.TerminalNodeImpl x = t.Key;
                if (x.Symbol == found_ref.Token) { return true; }
                d_pd.Attributes.TryGetValue(x, out IList<Symtab.CombinedScopeSymbol> list_v);
                if (list_v == null) { return false; }
                foreach (CombinedScopeSymbol v in list_v)
                {
                    ISymbol vv = v as Symtab.ISymbol;
                    if (vv == null) { return false; }
                    return vv.resolve() == found_def;
                }
                return false;
            }).Select(t => t.Key);
        foreach (TerminalNodeImpl r in refs)
        {
            result.Add(new Location()
            {
                Range = new Workspaces.Range(r.Symbol.StartIndex, r.Symbol.StopIndex),
                Uri = Workspaces.Workspace.Instance.FindDocument(r.Symbol.InputStream.SourceName)
            });
        }
    }
    if (found_def != null)
    {
        result.Add(new Location()
        {
            Range = new Workspaces.Range(found_def.Token.StartIndex, found_def.Token.StopIndex),
            Uri = Workspaces.Workspace.Instance.FindDocument(found_def.file)
        });
    }
    return result;
}
public static IList<Location> FindDef(int index, Document doc)
{
    List<Location> result = new List<Location>();
    if (doc == null) { return result; }
    IParseTree ref_pt = Util.Find(index, doc);
    if (ref_pt == null) { return result; }
    ParserDetails ref_pd = ParserDetailsFactory.Create(doc);
    if (ref_pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    ref_pd.Attributes.TryGetValue(ref_pt, out IList<Symtab.CombinedScopeSymbol> list_values);
    if (list_values == null) { return result; }
    foreach (CombinedScopeSymbol value in list_values)
    {
        if (value == null) { continue; }
        ISymbol @ref = value as Symtab.ISymbol;
        if (@ref == null) { continue; }
        ISymbol def = @ref.resolve();
        if (def == null) { continue; }
        string def_file = def.file;
        if (def_file == null) { continue; }
        Document def_item = Workspaces.Workspace.Instance.FindDocument(def_file);
        if (def_item == null) { continue; }
        Location new_loc = new Location()
        {
            Range = new Workspaces.Range(def.Token.StartIndex, def.Token.StopIndex),
            Uri = def_item
        };
        result.Add(new_loc);
    }
    return result;
}
public static Dictionary<string, string> MoveStartRuleToTop(int pos, Document document)
{
    var result = new Dictionary<string, string>();
    // Check if lexer grammar.
    AntlrGrammarDetails pd_parser = ParserDetailsFactory.Create(document) as AntlrGrammarDetails;
    ExtractGrammarType lp = new ExtractGrammarType();
    ParseTreeWalker.Default.Walk(lp, pd_parser.ParseTree);
    var is_lexer = lp.Type == ExtractGrammarType.GrammarType.Lexer;
    if (is_lexer)
    {
        // We don't consider lexer grammars.
        return result;
    }
    // Consider only the target grammar.
    Table table = new Table(pd_parser, document);
    table.ReadRules();
    table.FindPartitions();
    table.FindStartRules();
    string old_code = document.Code;
    // Collect the spans of the start rules, in document order.
    List<Pair<int, int>> move = new List<Pair<int, int>>();
    foreach (var r in table.rules)
    {
        if (r.is_parser_rule && r.is_start == true)
        {
            move.Add(new Pair<int, int>(r.start_index, r.end_index));
        }
    }
    move = move.OrderBy(p => p.a).ThenBy(p => p.b).ToList();
    // The insertion point is just before the first rule of the grammar.
    var find_first_rule = new FindFirstRule();
    ParseTreeWalker.Default.Walk(find_first_rule, pd_parser.ParseTree);
    var first_rule = find_first_rule.First;
    if (first_rule == null) { return result; }
    var insertion = first_rule.SourceInterval.a;
    var insertion_tok = pd_parser.TokStream.Get(insertion);
    var insertion_ind = insertion_tok.StartIndex;
    if (move.Count == 1 && move[0].a == insertion_ind)
    {
        // The single start rule is already at the top; nothing to do.
        return result;
    }
    StringBuilder sb = new StringBuilder();
    int previous = 0;
    // Copy everything before the insertion point.
    sb.Append(old_code.Substring(previous, insertion_ind - previous));
    previous = insertion_ind;
    // Insert the start rules at the top.
    foreach (var l in move)
    {
        sb.Append(old_code.Substring(l.a, l.b - l.a));
    }
    // Copy the remaining text, skipping the moved spans.
    foreach (var l in move)
    {
        sb.Append(old_code.Substring(previous, l.a - previous));
        previous = l.b;
    }
    sb.Append(old_code.Substring(previous));
    result.Add(document.FullPath, sb.ToString());
    return result;
}
public static Dictionary<string, string> ReorderParserRules(int pos, Document document, LspAntlr.ReorderType type)
{
    var result = new Dictionary<string, string>();
    // Check if lexer grammar.
    AntlrGrammarDetails pd_parser = ParserDetailsFactory.Create(document) as AntlrGrammarDetails;
    ExtractGrammarType lp = new ExtractGrammarType();
    ParseTreeWalker.Default.Walk(lp, pd_parser.ParseTree);
    var is_lexer = lp.Type == ExtractGrammarType.GrammarType.Lexer;
    if (is_lexer) { return result; }
    Table table = new Table(pd_parser, document);
    table.ReadRules();
    table.FindPartitions();
    table.FindStartRules();
    // Find new order of rules.
    string old_code = document.Code;
    List<Pair<int, int>> reorder = new List<Pair<int, int>>();
    if (type == LspAntlr.ReorderType.DFS || type == LspAntlr.ReorderType.BFS)
    {
        // Build the rule-reference graph over parser rules. The DFS and BFS cases
        // were verbatim duplicates except for the ordering class, so they share
        // this construction.
        Digraph<string> graph = new Digraph<string>();
        foreach (var r in table.rules)
        {
            if (!r.is_parser_rule) { continue; }
            graph.AddVertex(r.LHS);
        }
        foreach (var r in table.rules)
        {
            if (!r.is_parser_rule) { continue; }
            var j = r.RHS;
            //j.Reverse();
            foreach (var rhs in j)
            {
                var sym = table.rules.Where(t => t.LHS == rhs).FirstOrDefault();
                // Guard: a right-hand-side symbol may not name a rule in this table.
                if (sym == null) { continue; }
                if (!sym.is_parser_rule) { continue; }
                graph.AddEdge(new DirectedEdge<string>(r.LHS, rhs));
            }
        }
        List<string> starts = new List<string>();
        foreach (var r in table.rules)
        {
            if (r.is_parser_rule && r.is_start) { starts.Add(r.LHS); }
        }
        // Order rules by a depth-first or breadth-first walk from the start rules.
        List<string> ordered;
        if (type == LspAntlr.ReorderType.DFS)
        {
            var sort = new DepthFirstOrder<string, DirectedEdge<string>>(graph, starts);
            ordered = sort.ToList();
        }
        else
        {
            var sort = new BreadthFirstOrder<string, DirectedEdge<string>>(graph, starts);
            ordered = sort.ToList();
        }
        foreach (var s in ordered)
        {
            var row = table.rules[table.nt_to_index[s]];
            reorder.Add(new Pair<int, int>(row.start_index, row.end_index));
        }
    }
    else if (type == LspAntlr.ReorderType.Alphabetically)
    {
        var ordered = table.rules
            .Where(r => r.is_parser_rule)
            .Select(r => r.LHS)
            .OrderBy(r => r).ToList();
        foreach (var s in ordered)
        {
            var row = table.rules[table.nt_to_index[s]];
            reorder.Add(new Pair<int, int>(row.start_index, row.end_index));
        }
    }
    else
    {
        return result;
    }
    StringBuilder sb = new StringBuilder();
    int previous = 0;
    // Copy the header up to the first rule.
    {
        int index_start = table.rules[0].start_index;
        sb.Append(old_code.Substring(previous, index_start - previous));
        previous = index_start;
    }
    // Emit parser rules in the new order.
    foreach (var l in reorder)
    {
        sb.Append(old_code.Substring(l.a, l.b - l.a));
    }
    // Now add all non-parser rules.
    foreach (var r in table.rules)
    {
        if (r.is_parser_rule) { continue; }
        sb.Append(old_code.Substring(r.start_index, r.end_index - r.start_index));
    }
    string new_code = sb.ToString();
    result.Add(document.FullPath, new_code);
    return result;
}
public static QuickInfo GetQuickInfo(int index, Document doc)
{
    ParserDetails pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null)
    {
        LanguageServer.Module.Compile();
    }
    Antlr4.Runtime.Tree.IParseTree pt = LanguageServer.Util.Find(index, doc);
    IGrammarDescription gd = GrammarDescriptionFactory.Create(doc.FullPath);
    if (pt == null) { return null; }
    Antlr4.Runtime.Tree.IParseTree p = pt;
    pd.Attributes.TryGetValue(p, out IList<CombinedScopeSymbol> list_value);
    if (list_value == null) { return null; }
    TerminalNodeImpl q = p as Antlr4.Runtime.Tree.TerminalNodeImpl;
    // Guard: Find() should return a terminal leaf, but check before dereferencing.
    if (q == null) { return null; }
    Range range = new Workspaces.Range(new Workspaces.Index(q.Symbol.StartIndex), new Workspaces.Index(q.Symbol.StopIndex + 1));
    bool found = pd.Tags.TryGetValue(q, out int tag_type);
    if (!found) { return null; }
    if (list_value.Count == 0)
    {
        return new QuickInfo() { Display = gd.Map[tag_type], Range = range };
    }
    if (list_value.Count == 1)
    {
        CombinedScopeSymbol value = list_value.First();
        ISymbol name = value as Symtab.ISymbol;
        string show = name?.Name;
        if (value is Symtab.Literal)
        {
            show = ((Symtab.Literal)value).Cleaned;
        }
        // Prefer a grammar-specific pop-up when one is registered for this tag.
        if (gd.PopUpDefinition[tag_type] != null)
        {
            Func<ParserDetails, IParseTree, string> fun = gd.PopUpDefinition[tag_type];
            string mess = fun(pd, p);
            if (mess != null)
            {
                return new QuickInfo() { Display = mess, Range = range };
            }
        }
        string display = gd.Map[tag_type] + "\n" + show;
        return new QuickInfo() { Display = display, Range = range };
    }
    // Multiple attribute values: report each of them.
    {
        string display = "Ambiguous -- ";
        foreach (CombinedScopeSymbol value in list_value)
        {
            ISymbol name = value as Symtab.ISymbol;
            string show = name?.Name;
            if (value is Symtab.Literal)
            {
                show = ((Symtab.Literal)value).Cleaned;
            }
            if (gd.PopUpDefinition[tag_type] != null)
            {
                Func<ParserDetails, IParseTree, string> fun = gd.PopUpDefinition[tag_type];
                string mess = fun(pd, p);
                if (mess != null)
                {
                    display = display + mess;
                }
            }
            else
            {
                display = display + gd.Map[tag_type] + "\n" + show;
            }
        }
        return new QuickInfo() { Display = display, Range = range };
    }
}
public static Dictionary<string, string> SplitCombineGrammars(int pos, Document document, bool split)
{
    var result = new Dictionary<string, string>();
    // Check the grammar type: only a combined grammar can be split, and only
    // a parser grammar can be recombined with its lexer grammar.
    AntlrGrammarDetails pd_parser = ParserDetailsFactory.Create(document) as AntlrGrammarDetails;
    ExtractGrammarType lp = new ExtractGrammarType();
    ParseTreeWalker.Default.Walk(lp, pd_parser.ParseTree);
    if (split && lp.Type != ExtractGrammarType.GrammarType.Combined) { return null; }
    if (!split && lp.Type != ExtractGrammarType.GrammarType.Parser) { return null; }
    Table table = new Table(pd_parser, document);
    table.ReadRules();
    table.FindPartitions();
    table.FindStartRules();
    string old_code = document.Code;
    if (split)
    {
        // Create a parser and lexer grammar.
        StringBuilder sb_parser = new StringBuilder();
        StringBuilder sb_lexer = new StringBuilder();
        var root = pd_parser.ParseTree as ANTLRv4Parser.GrammarSpecContext;
        if (root == null) { return null; }
        var grammar_type_tree = root.grammarType();
        var id = root.id();
        var semi_tree = root.SEMI();
        var rules_tree = root.rules();
        // Copy everything before the grammar declaration, then emit new headers.
        string pre = old_code.Substring(0, pd_parser.TokStream.Get(grammar_type_tree.SourceInterval.a).StartIndex);
        sb_parser.Append(pre);
        sb_lexer.Append(pre);
        sb_parser.Append("parser grammar " + id.GetText() + "Parser;" + Environment.NewLine);
        sb_lexer.Append("lexer grammar " + id.GetText() + "Lexer;" + Environment.NewLine);
        // Copy the text between the header and the first rule into both grammars.
        int x1 = pd_parser.TokStream.Get(semi_tree.SourceInterval.b).StopIndex + 1;
        int x2 = pd_parser.TokStream.Get(rules_tree.SourceInterval.a).StartIndex;
        string n1 = old_code.Substring(x1, x2 - x1);
        sb_parser.Append(n1);
        sb_lexer.Append(n1);
        // Partition rule symbols: parser rules go to the parser grammar,
        // lexer rules to the lexer grammar.
        int end = 0;
        for (int i = 0; i < table.rules.Count; ++i)
        {
            var r = table.rules[i];
            string n2 = old_code.Substring(r.start_index, r.end_index - r.start_index);
            if (r.is_parser_rule)
            {
                sb_parser.Append(n2);
            }
            else
            {
                sb_lexer.Append(n2);
            }
            end = r.end_index + 1;
        }
        if (end < old_code.Length)
        {
            string rest = old_code.Substring(end);
            sb_parser.Append(rest);
            sb_lexer.Append(rest);
        }
        string g4_file_path = document.FullPath;
        string current_dir = Path.GetDirectoryName(g4_file_path);
        if (current_dir == null) { return null; }
        string orig_name = Path.GetFileNameWithoutExtension(g4_file_path);
        string new_parser_ffn = current_dir + Path.DirectorySeparatorChar + orig_name + "Parser.g4";
        string new_lexer_ffn = current_dir + Path.DirectorySeparatorChar + orig_name + "Lexer.g4";
        result.Add(new_parser_ffn, sb_parser.ToString());
        result.Add(new_lexer_ffn, sb_lexer.ToString());
        result.Add(g4_file_path, null);
    }
    else
    {
        // Combine: find the lexer grammar by chasing grammar dependencies
        // transitively until the set of files stops growing.
        HashSet<string> read_files = new HashSet<string>();
        read_files.Add(document.FullPath);
        for (; ;)
        {
            int before_count = read_files.Count;
            foreach (var f in read_files)
            {
                var additional = AntlrGrammarDetails._dependent_grammars.Where(
                    t => t.Value.Contains(f)).Select(t => t.Key).ToList();
                read_files = read_files.Union(additional).ToHashSet();
            }
            int after_count = read_files.Count;
            if (after_count == before_count) { break; }
        }
        List<AntlrGrammarDetails> lexers = new List<AntlrGrammarDetails>();
        foreach (string f in read_files)
        {
            Workspaces.Document lexer_document = Workspaces.Workspace.Instance.FindDocument(f);
            if (lexer_document == null) { continue; }
            AntlrGrammarDetails x = ParserDetailsFactory.Create(lexer_document) as AntlrGrammarDetails;
            lexers.Add(x);
        }
        // Expect exactly the parser grammar and its lexer grammar.
        if (lexers.Count != 2) { return null; }
        var pd_lexer = lexers[1];
        Workspaces.Document ldocument = Workspaces.Workspace.Instance.FindDocument(pd_lexer.FullFileName);
        Table lexer_table = new Table(pd_lexer, ldocument);
        lexer_table.ReadRules();
        lexer_table.FindPartitions();
        lexer_table.FindStartRules();
        // Create a combined grammar: header, then parser rules, then lexer rules.
        StringBuilder sb_parser = new StringBuilder();
        var root = pd_parser.ParseTree as ANTLRv4Parser.GrammarSpecContext;
        if (root == null) { return null; }
        var grammar_type_tree = root.grammarType();
        var id = root.id();
        var semi_tree = root.SEMI();
        var rules_tree = root.rules();
        string pre = old_code.Substring(0, pd_parser.TokStream.Get(grammar_type_tree.SourceInterval.a).StartIndex);
        sb_parser.Append(pre);
        sb_parser.Append("grammar " + id.GetText().Replace("Parser", "") + ";" + Environment.NewLine);
        int x1 = pd_parser.TokStream.Get(semi_tree.SourceInterval.b).StopIndex + 1;
        int x2 = pd_parser.TokStream.Get(rules_tree.SourceInterval.a).StartIndex;
        string n1 = old_code.Substring(x1, x2 - x1);
        sb_parser.Append(n1);
        int end = 0;
        for (int i = 0; i < table.rules.Count; ++i)
        {
            var r = table.rules[i];
            if (r.is_parser_rule)
            {
                sb_parser.Append(old_code.Substring(r.start_index, r.end_index - r.start_index));
            }
            end = r.end_index + 1;
        }
        if (end < old_code.Length)
        {
            sb_parser.Append(old_code.Substring(end));
        }
        end = 0;
        var lexer_old_code = ldocument.Code;
        for (int i = 0; i < lexer_table.rules.Count; ++i)
        {
            var r = lexer_table.rules[i];
            if (!r.is_parser_rule)
            {
                sb_parser.Append(lexer_old_code.Substring(r.start_index, r.end_index - r.start_index));
            }
            end = r.end_index + 1;
        }
        if (end < lexer_old_code.Length)
        {
            sb_parser.Append(lexer_old_code.Substring(end));
        }
        string g4_file_path = document.FullPath;
        string current_dir = Path.GetDirectoryName(g4_file_path);
        if (current_dir == null) { return null; }
        string orig_name = Path.GetFileName(g4_file_path);
        var new_name = orig_name.Replace("Parser.g4", "");
        string new_parser_ffn = current_dir + Path.DirectorySeparatorChar + new_name + ".g4";
        result.Add(new_parser_ffn, sb_parser.ToString());
        result.Add(pd_parser.FullFileName, null);
        result.Add(pd_lexer.FullFileName, null);
    }
    return result;
}
public virtual void GatherRefs()
{
    Workspaces.Document item = Item;
    string ffn = item.FullPath;
    IGrammarDescription gd = GrammarDescriptionFactory.Create(ffn);
    if (gd == null) { throw new Exception(); }
    for (int classification = 0; classification < gd.Identify.Count; ++classification)
    {
        Func<IGrammarDescription, Dictionary<IParseTree, IList<CombinedScopeSymbol>>, IParseTree, bool> fun = gd.Identify[classification];
        if (fun == null) { continue; }
        IEnumerable<IParseTree> it = AllNodes.Where(t => fun(gd, Attributes, t));
        foreach (IParseTree t in it)
        {
            TerminalNodeImpl x = t as TerminalNodeImpl;
            if (x == null) { continue; }
            if (x.Symbol == null) { continue; }
            try
            {
                Attributes.TryGetValue(x, out IList<CombinedScopeSymbol> attr_list);
                if (attr_list == null) { continue; }
                foreach (CombinedScopeSymbol attr in attr_list)
                {
                    Tags.Add(x, classification);
                    if (attr == null) { continue; }
                    ISymbol sym = attr as Symtab.ISymbol;
                    if (sym == null) { continue; }
                    ISymbol def = sym.resolve();
                    if (def != null && def.file != null && def.file != "" && def.file != ffn)
                    {
                        Workspaces.Document def_item = Workspaces.Workspace.Instance.FindDocument(def.file);
                        ParserDetails def_pd = ParserDetailsFactory.Create(def_item);
                        def_pd.PropagateChangesTo.Add(ffn);
                    }
                    Refs.Add(x, classification);
                }
            }
            catch (ArgumentException)
            {
                // Duplicate
            }
        }
    }
}
public virtual void GatherRefs()
{
    var item = Item;
    var ffn = item.FullPath;
    IGrammarDescription gd = GrammarDescriptionFactory.Create(ffn);
    if (gd == null) { throw new Exception(); }
    for (int classification = 0; classification < gd.Identify.Count; ++classification)
    {
        var fun = gd.Identify[classification];
        if (fun == null) { continue; }
        var it = this.AllNodes.Where(t => fun(gd, this.Attributes, t));
        foreach (var t in it)
        {
            var x = t as TerminalNodeImpl;
            if (x == null) { continue; }
            if (x.Symbol == null) { continue; }
            try
            {
                this.Attributes.TryGetValue(x, out IList<CombinedScopeSymbol> attr_list);
                if (attr_list == null) { continue; }
                foreach (var attr in attr_list)
                {
                    this.Tags.Add(x, classification);
                    if (attr == null) { continue; }
                    var sym = attr as Symtab.ISymbol;
                    if (sym == null) { continue; }
                    var def = sym.resolve();
                    if (def != null && def.file != null && def.file != "" && def.file != ffn)
                    {
                        var def_item = Workspaces.Workspace.Instance.FindDocument(def.file);
                        var def_pd = ParserDetailsFactory.Create(def_item);
                        def_pd.PropagateChangesTo.Add(ffn);
                    }
                    this.Refs.Add(x, classification);
                }
            }
            catch (ArgumentException)
            {
                // Duplicate
            }
        }
    }
}
public static IEnumerable<DocumentSymbol> Get(Workspaces.Range range, Document doc)
{
    var pd = ParserDetailsFactory.Create(doc);
    if (pd.ParseTree == null)
    {
        return new List<DocumentSymbol>();
    }
    var combined = new List<DocumentSymbol>();
    foreach (var p in pd.Tags)
    {
        if (p.Key.Symbol == null) { continue; }
        int start_token_start = p.Key.Symbol.StartIndex;
        int end_token_end = p.Key.Symbol.StopIndex + 1;
        if (start_token_start > range.End.Value) { continue; }
        if (end_token_end < range.Start.Value) { continue; }
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.Symbol.Text,
            range = new Workspaces.Range(p.Key.Symbol.StartIndex, p.Key.Symbol.StopIndex),
            kind = p.Value
        });
    }
    foreach (var p in pd.Comments)
    {
        int start_token_start = p.Key.StartIndex;
        int end_token_end = p.Key.StopIndex + 1;
        if (start_token_start > range.End.Value) { continue; }
        if (end_token_end < range.Start.Value) { continue; }
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.Text,
            range = new Workspaces.Range(p.Key.StartIndex, p.Key.StopIndex),
            kind = p.Value
        });
    }
    // Sort the list.
    var sorted_combined_tokens = combined.OrderBy(t => t.range.Start.Value).ThenBy(t => t.range.End.Value);
    return sorted_combined_tokens;
}
public static Dictionary<string, string> ReplaceLiterals(int index, Document document)
{
    Dictionary<string, string> result = new Dictionary<string, string>();
    // Check if initial file is a grammar.
    AntlrGrammarDetails pd_parser = ParserDetailsFactory.Create(document) as AntlrGrammarDetails;
    ExtractGrammarType egt = new ExtractGrammarType();
    ParseTreeWalker.Default.Walk(egt, pd_parser.ParseTree);
    var is_grammar = egt.Type == ExtractGrammarType.GrammarType.Parser
        || egt.Type == ExtractGrammarType.GrammarType.Combined
        || egt.Type == ExtractGrammarType.GrammarType.Lexer;
    if (!is_grammar) { return result; }
    // Find all other grammars by walking dependencies (import, vocab, file names).
    HashSet<string> read_files = new HashSet<string>();
    read_files.Add(document.FullPath);
    Dictionary<Workspaces.Document, List<TerminalNodeImpl>> every_damn_literal =
        new Dictionary<Workspaces.Document, List<TerminalNodeImpl>>();
    for (; ;)
    {
        int before_count = read_files.Count;
        foreach (var f in read_files)
        {
            var additional = AntlrGrammarDetails._dependent_grammars.Where(
                t => t.Value.Contains(f)).Select(t => t.Key).ToList();
            read_files = read_files.Union(additional).ToHashSet();
        }
        foreach (var f in read_files)
        {
            var additional = AntlrGrammarDetails._dependent_grammars.Where(
                t => t.Key == f).Select(t => t.Value);
            foreach (var t in additional)
            {
                read_files = read_files.Union(t).ToHashSet();
            }
        }
        int after_count = read_files.Count;
        if (after_count == before_count) { break; }
    }
    // Find rewrite rules, i.e., string literal to symbol name.
    Dictionary<string, string> subs = new Dictionary<string, string>();
    foreach (string f in read_files)
    {
        Workspaces.Document whatever_document = Workspaces.Workspace.Instance.FindDocument(f);
        if (whatever_document == null) { continue; }
        AntlrGrammarDetails pd_whatever = ParserDetailsFactory.Create(whatever_document) as AntlrGrammarDetails;
        // Find literals in grammars.
        LiteralsGrammar lp_whatever = new LiteralsGrammar(pd_whatever);
        ParseTreeWalker.Default.Walk(lp_whatever, pd_whatever.ParseTree);
        List<TerminalNodeImpl> list_literals = lp_whatever.Literals;
        every_damn_literal[whatever_document] = list_literals;
        foreach (var lexer_literal in list_literals)
        {
            var old_name = lexer_literal.GetText();
            // Given a candidate, walk up the tree and accept it only if the literal
            // is the sole right-hand side of a lexer rule, i.e. the parse tree has
            // the shape
            //
            //   (lexerRuleSpec TOKEN_REF ':'
            //     (lexerRuleBlock (lexerAltList (lexerAlt (lexerElements
            //       (lexerElement (lexerAtom (terminal 'literal'))))))) ';')
            //
            var p1 = lexer_literal.Parent;
            if (p1.ChildCount != 1) { continue; }
            if (!(p1 is ANTLRv4Parser.TerminalContext)) { continue; }
            var p2 = p1.Parent;
            if (p2.ChildCount != 1) { continue; }
            if (!(p2 is ANTLRv4Parser.LexerAtomContext)) { continue; }
            var p3 = p2.Parent;
            if (p3.ChildCount != 1) { continue; }
            if (!(p3 is ANTLRv4Parser.LexerElementContext)) { continue; }
            var p4 = p3.Parent;
            if (p4.ChildCount != 1) { continue; }
            if (!(p4 is ANTLRv4Parser.LexerElementsContext)) { continue; }
            var p5 = p4.Parent;
            if (p5.ChildCount != 1) { continue; }
            if (!(p5 is ANTLRv4Parser.LexerAltContext)) { continue; }
            var p6 = p5.Parent;
            if (p6.ChildCount != 1) { continue; }
            if (!(p6 is ANTLRv4Parser.LexerAltListContext)) { continue; }
            var p7 = p6.Parent;
            if (p7.ChildCount != 1) { continue; }
            if (!(p7 is ANTLRv4Parser.LexerRuleBlockContext)) { continue; }
            var p8 = p7.Parent;
            if (p8.ChildCount != 4) { continue; }
            if (!(p8 is ANTLRv4Parser.LexerRuleSpecContext)) { continue; }
            var alt = p8.GetChild(0);
            var new_name = alt.GetText();
            subs.Add(old_name, new_name);
        }
    }
    // Find string literals in parser and combined grammars and substitute.
    Dictionary<TerminalNodeImpl, string> rewrites = new Dictionary<TerminalNodeImpl, string>();
    foreach (var pair in every_damn_literal)
    {
        var doc = pair.Key;
        var list_literals = pair.Value;
        foreach (var l in list_literals)
        {
            // Make sure this literal does not appear in a lexer rule.
            bool no = false;
            for (IRuleNode p = l.Parent; p != null; p = p.Parent)
            {
                if (p is ANTLRv4Parser.LexerRuleSpecContext)
                {
                    no = true;
                    break;
                }
            }
            if (no) { continue; }
            subs.TryGetValue(l.GetText(), out string re);
            if (re != null)
            {
                rewrites.Add(l, re);
            }
        }
    }
    // Apply the rewrites file by file, in token order.
    var files = rewrites.Select(r => r.Key.Payload.TokenSource.SourceName).OrderBy(q => q).Distinct();
    var documents = files.Select(f => Workspaces.Workspace.Instance.FindDocument(f)).ToList();
    foreach (Document f in documents)
    {
        string fn = f.FullPath;
        var per_file_changes = rewrites.Where(z => z.Key.Payload.TokenSource.SourceName == f.FullPath)
            .OrderBy(z => z.Key.Payload.TokenIndex).ToList();
        StringBuilder sb = new StringBuilder();
        int previous = 0;
        string code = f.Code;
        foreach (var l in per_file_changes)
        {
            int index_start = l.Key.Payload.StartIndex;
            int len = l.Key.Payload.Text.Length;
            sb.Append(code.Substring(previous, index_start - previous));
            sb.Append(l.Value);
            previous = index_start + len;
        }
        sb.Append(code.Substring(previous));
        result.Add(fn, sb.ToString());
    }
    return result;
}
public static List<ParserDetails> Compile()
{
    try
    {
        Workspace ws = Workspaces.Workspace.Instance;
        // Get all changed files.
        HashSet<ParserDetails> to_do = new HashSet<ParserDetails>();
    DoAgain:
        // Get current directory, and add all grammar files.
        foreach (Document document in Workspaces.DFSContainer.DFS(ws))
        {
            string file_name = document.FullPath;
            if (file_name == null) { continue; }
            Container parent = document.Parent;
            IGrammarDescription gd = LanguageServer.GrammarDescriptionFactory.Create(file_name);
            if (gd == null) { continue; }
            // Get suffix of file_name, and scan the directory for siblings with the same extension.
            string extension = System.IO.Path.GetExtension(file_name);
            string directory = System.IO.Path.GetDirectoryName(file_name);
            foreach (string file in System.IO.Directory.GetFiles(directory))
            {
                if (System.IO.Path.GetExtension(file) != extension) { continue; }
                IGrammarDescription g2 = LanguageServer.GrammarDescriptionFactory.Create(file);
                if (g2 == null) { continue; }
                Document x = Workspaces.Workspace.Instance.FindDocument(file);
                if (x == null)
                {
                    // Add document.
                    Container proj = parent;
                    Document new_doc = new Workspaces.Document(file);
                    proj.AddChild(new_doc);
                }
                ParserDetails p2 = ParserDetailsFactory.Create(document);
                if (!p2.Changed) { continue; }
                to_do.Add(p2);
            }
        }
        foreach (Document document in Workspaces.DFSContainer.DFS(ws))
        {
            string file_name = document.FullPath;
            if (file_name == null) { continue; }
            IGrammarDescription gd = LanguageServer.GrammarDescriptionFactory.Create(file_name);
            if (gd == null) { continue; }
            // file_name can be a URI, so checking System.IO.File.Exists doesn't make sense here.
            ParserDetails pd = ParserDetailsFactory.Create(document);
            if (!pd.Changed) { continue; }
            to_do.Add(pd);
        }
        Digraph<ParserDetails> g = ConstructGraph(to_do);
        foreach (ParserDetails v in g.Vertices)
        {
            v.Item.Changed = true; // Force.
            v.Parse();
        }
        bool changed = true;
        for (int pass = 0; changed; pass++)
        {
            changed = false;
            foreach (ParserDetails v in g.Vertices)
            {
                int number_of_passes = v.Passes.Count;
                if (pass < number_of_passes)
                {
                    bool reset = v.Pass(pass);
                    if (reset) { goto DoAgain; }
                    changed = true;
                }
            }
        }
        foreach (ParserDetails v in g.Vertices) { v.GatherDefs(); }
        foreach (ParserDetails v in g.Vertices) { v.GatherRefs(); }
        foreach (ParserDetails v in g.Vertices) { v.GatherErrors(); }
        return g.Vertices.ToList();
    }
    catch (Exception e)
    {
        Logger.Log.Notify(e.ToString());
    }
    return new List<ParserDetails>();
}
public static TextEdit[] Reformat(Document doc)
{
    ParserDetails ref_pd = ParserDetailsFactory.Create(doc);
    string code = doc.Code;
    string corpus_location = Options.Option.GetString("CorpusLocation");
    if (corpus_location == null)
    {
        return new TextEdit[] { };
    }
    string ffn = doc.FullPath;
    if (ffn == null)
    {
        return new TextEdit[] { };
    }
    IGrammarDescription grammar_description = LanguageServer.GrammarDescriptionFactory.Create(ffn);
    if (grammar_description == null)
    {
        return new TextEdit[] { };
    }
    org.antlr.codebuff.Tool.unformatted_input = code;
    try
    {
        string result = org.antlr.codebuff.Tool.Main(
            new object[]
            {
                "-g", grammar_description.Name,
                "-lexer", grammar_description.Lexer,
                "-parser", grammar_description.Parser,
                "-rule", grammar_description.StartRule,
                "-files", grammar_description.FileExtension,
                "-corpus", corpus_location,
                "-inoutstring", ""
            });
        // Convert the reformatted text into LSP TextEdits by diffing against the original.
        List<TextEdit> edits = new List<TextEdit>();
        diff_match_patch diff = new diff_match_patch();
        List<Diff> diffs = diff.diff_main(code, result);
        List<Patch> patch = diff.patch_make(diffs);
        int delta = 0;
        foreach (Patch p in patch)
        {
            int start = p.start1 - delta;
            int offset = 0;
            foreach (Diff ed in p.diffs)
            {
                if (ed.operation == Operation.EQUAL)
                {
                    offset = offset + ed.text.Length;
                }
                else if (ed.operation == Operation.DELETE)
                {
                    int len = ed.text.Length;
                    TextEdit edit = new TextEdit()
                    {
                        range = new Workspaces.Range(
                            new Workspaces.Index(start + offset),
                            new Workspaces.Index(start + offset + len)),
                        NewText = ""
                    };
                    offset = offset + len;
                    edits.Add(edit);
                }
                else if (ed.operation == Operation.INSERT)
                {
                    TextEdit edit = new TextEdit()
                    {
                        range = new Workspaces.Range(
                            new Workspaces.Index(start + offset),
                            new Workspaces.Index(start + offset)),
                        NewText = ed.text
                    };
                    edits.Add(edit);
                }
            }
            delta = delta + (p.length2 - p.length1);
        }
        return edits.ToArray();
    }
    catch (Exception)
    {
        return new TextEdit[] { };
    }
}
public static List<ParserDetails> Compile()
{
    try
    {
        var ws = Workspaces.Workspace.Instance;
        // Get all changed files.
        HashSet<ParserDetails> to_do = new HashSet<ParserDetails>();
    DoAgain:
        foreach (var document in Workspaces.DFSContainer.DFS(ws))
        {
            string file_name = document.FullPath;
            if (file_name == null) { continue; }
            var gd = LanguageServer.GrammarDescriptionFactory.Create(file_name);
            if (gd == null) { continue; }
            // file_name can be a URI, so checking System.IO.File.Exists doesn't make sense here.
            var pd = ParserDetailsFactory.Create(document);
            if (!pd.Changed) { continue; }
            to_do.Add(pd);
        }
        Digraph<ParserDetails> g = ConstructGraph(to_do);
        foreach (var v in g.Vertices)
        {
            v.Item.Changed = true; // Force.
            v.Parse();
        }
        var changed = true;
        for (int pass = 0; changed; pass++)
        {
            changed = false;
            foreach (var v in g.Vertices)
            {
                int number_of_passes = v.Passes.Count;
                if (pass < number_of_passes)
                {
                    var reset = v.Pass(pass);
                    if (reset) { goto DoAgain; }
                    changed = true;
                }
            }
        }
        foreach (var v in g.Vertices) { v.GatherDefs(); }
        foreach (var v in g.Vertices) { v.GatherRefs(); }
        foreach (var v in g.Vertices) { v.GatherErrors(); }
        return g.Vertices.ToList();
    }
    catch (Exception e)
    {
        Logger.Log.Notify(e.ToString());
    }
    return new List<ParserDetails>();
}
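// Usage sketch (an assumption, not part of the original sources): the expected flow
// is to Compile() all changed grammars first, then run queries such as
// GetDocumentSymbol against the freshly gathered tags. The unqualified calls assume
// the methods are visible from the caller; the offset and path are hypothetical.
public static void CompileThenQueryExample()
{
    List<ParserDetails> parsed = Compile();
    Document doc = Workspaces.Workspace.Instance.FindDocument(@"C:\grammars\Expr.g4");
    if (doc == null) { return; }
    DocumentSymbol sym = GetDocumentSymbol(0, doc);
    if (sym != null)
    {
        System.Console.WriteLine($"{sym.name}: kind {sym.kind}");
    }
}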