public static TerminalNodeImpl Find(int index, Document document)
{
    ParsingResults pd = ParsingResultsFactory.Create(document);
    var workspace = document.Workspace;
    if (pd.ParseTree == null)
    {
        new Module().Compile(workspace);
    }
    // Walk the parse tree depth-first and return the leaf whose token
    // span contains the given character index.
    foreach (IParseTree node in DFSVisitor.DFS(pd.ParseTree as ParserRuleContext))
    {
        if (!(node is TerminalNodeImpl leaf))
        {
            continue;
        }
        if (leaf.Symbol.StartIndex <= index && index <= leaf.Symbol.StopIndex)
        {
            return leaf;
        }
    }
    return null;
}
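// Hedged usage sketch (an addition, not in the original source): how Find()
// might be called to describe the token under a caret offset. "DescribeTokenAt",
// "doc", and "offset" are hypothetical names introduced for illustration only.
public static string DescribeTokenAt(int offset, Document doc)
{
    TerminalNodeImpl leaf = Find(offset, doc);
    if (leaf == null)
    {
        return "no token at offset " + offset;
    }
    return "token '" + leaf.Symbol.Text + "' spans ["
        + leaf.Symbol.StartIndex + ", " + leaf.Symbol.StopIndex + "]";
}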
public IEnumerable<Workspaces.Range> GetErrors(Workspaces.Range range, Document doc)
{
    ParsingResults pd = ParsingResultsFactory.Create(doc);
    var workspace = doc.Workspace;
    if (pd.ParseTree == null)
    {
        Compile(workspace);
    }
    List<Workspaces.Range> result = new List<Workspaces.Range>();
    foreach (IParseTree p in pd.Errors)
    {
        ErrorNodeImpl q = p as Antlr4.Runtime.Tree.ErrorNodeImpl;
        if (q == null || q.Payload == null)
        {
            continue;
        }
        // Clamp negative token indices to zero, then form the half-open
        // span [start, end) covered by the error node.
        int start = q.Payload.StartIndex < 0 ? 0 : q.Payload.StartIndex;
        int end = (q.Payload.StopIndex < 0 ? 0 : q.Payload.StopIndex) + 1;
        // Skip errors that do not overlap the requested range.
        if (start > range.End.Value || end < range.Start.Value)
        {
            continue;
        }
        result.Add(new Workspaces.Range(new Workspaces.Index(start), new Workspaces.Index(end)));
    }
    return result;
}
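// Hedged helper sketch (an addition): the overlap test used in GetErrors()
// above, factored out. Two spans are kept unless one ends before the other
// starts; "Overlaps" is a hypothetical name.
private static bool Overlaps(int start, int end, Workspaces.Range range)
{
    return start <= range.End.Value && end >= range.Start.Value;
}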
public List<string> Completion(int char_index, Document document)
{
    ParsingResults ref_pd = ParsingResultsFactory.Create(document);
    var workspace = document.Workspace;
    if (ref_pd.ParseTree == null)
    {
        Compile(workspace);
    }
    return ref_pd.Candidates(char_index);
}
public IEnumerable<Info> Get(int start, int end, Document doc)
{
    try
    {
        ParsingResults pd = ParsingResultsFactory.Create(doc);
        var workspace = doc.Workspace;
        if (pd.ParseTree == null)
        {
            Compile(workspace);
        }
        List<Info> combined = new List<Info>();
        foreach (KeyValuePair<Antlr4.Runtime.IToken, int> p in pd.ColorizedList)
        {
            if (p.Key == null)
            {
                continue;
            }
            var sym = p.Key;
            var st = sym.StartIndex;
            var en = sym.StopIndex + 1;
            // Skip tokens entirely outside the requested window.
            if (end < st || en < start)
            {
                continue;
            }
            // Clip the token span to the requested window.
            int s1 = st > start ? st : start;
            int s2 = en < end ? en : end;
            combined.Add(new Info() { start = s1, end = s2, kind = p.Value });
        }
        // Sort the list.
        return combined.OrderBy(t => t.start).ThenBy(t => t.end);
    }
    catch (Exception)
    {
        // On any failure, fall through and return an empty result.
    }
    return new List<Info>();
}
public IEnumerable<DocumentSymbol> GetSymbols(Document doc)
{
    ParsingResults pd = ParsingResultsFactory.Create(doc);
    var workspace = doc.Workspace;
    if (pd.ParseTree == null)
    {
        Compile(workspace);
    }
    List<DocumentSymbol> combined = new List<DocumentSymbol>();
    foreach (KeyValuePair<TerminalNodeImpl, int> p in pd.Defs)
    {
        if (p.Key == null)
        {
            continue;
        }
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.GetText(),
            range = new Workspaces.Range(p.Key.Payload.StartIndex, p.Key.Payload.StopIndex),
            kind = p.Value
        });
    }
    foreach (KeyValuePair<TerminalNodeImpl, int> p in pd.Refs)
    {
        if (p.Key == null)
        {
            continue;
        }
        combined.Add(new DocumentSymbol()
        {
            name = p.Key.GetText(),
            range = new Workspaces.Range(p.Key.Payload.StartIndex, p.Key.Payload.StopIndex),
            kind = p.Value
        });
    }
    // Sort the list.
    return combined.OrderBy(t => t.range.Start.Value).ThenBy(t => t.range.End.Value);
}
private Digraph<ParsingResults> ConstructGraph(IEnumerable<ParsingResults> to_do)
{
    Digraph<ParsingResults> g = new Digraph<ParsingResults>();
    HashSet<ParsingResults> done = new HashSet<ParsingResults>();
    Stack<ParsingResults> stack = new Stack<ParsingResults>();
    foreach (ParsingResults f in to_do)
    {
        stack.Push(f);
    }
    // First pass: add a vertex for every file reachable through change
    // propagation.
    while (stack.Count > 0)
    {
        ParsingResults f = stack.Pop();
        var workspace = f.Item.Workspace;
        g.AddVertex(f);
        done.Add(f);
        foreach (string d in f.PropagateChangesTo)
        {
            Document d_doc = workspace.FindDocument(d);
            ParsingResults d_pd = ParsingResultsFactory.Create(d_doc);
            if (done.Contains(d_pd))
            {
                continue;
            }
            stack.Push(d_pd);
        }
    }
    // Second pass: add an edge from each file to every file it propagates
    // changes to.
    foreach (ParsingResults v in g.Vertices)
    {
        HashSet<string> deps = v.PropagateChangesTo;
        var workspace = v.Item.Workspace;
        Document doc = workspace.FindDocument(v.FullFileName);
        ParsingResults pd = ParsingResultsFactory.Create(doc);
        foreach (string d in deps)
        {
            Document d_doc = workspace.FindDocument(d);
            ParsingResults d_pd = ParsingResultsFactory.Create(d_doc);
            g.AddEdge(new DirectedEdge<ParsingResults>(pd, d_pd));
        }
    }
    return g;
}
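// Hedged sketch (an addition): a quick way to inspect the dependency graph
// built by ConstructGraph(). Only members already used above (Vertices,
// FullFileName, PropagateChangesTo) are assumed; "DumpGraph" is a
// hypothetical name.
private static void DumpGraph(Digraph<ParsingResults> g)
{
    foreach (ParsingResults v in g.Vertices)
    {
        System.Console.Error.WriteLine(
            v.FullFileName + " -> " + string.Join(", ", v.PropagateChangesTo));
    }
}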
public IEnumerable<TerminalNodeImpl> GetDefsLeaf(Document doc)
{
    List<TerminalNodeImpl> result = new List<TerminalNodeImpl>();
    ParsingResults ref_pd = ParsingResultsFactory.Create(doc);
    var workspace = doc.Workspace;
    if (ref_pd.ParseTree == null)
    {
        Compile(workspace);
    }
    foreach (KeyValuePair<TerminalNodeImpl, int> value in ref_pd.Defs)
    {
        result.Add(value.Key);
    }
    return result;
}
public int GetTag(int index, Document doc)
{
    ParsingResults pd = ParsingResultsFactory.Create(doc);
    var workspace = doc.Workspace;
    if (pd.ParseTree == null)
    {
        Compile(workspace);
    }
    Antlr4.Runtime.Tree.IParseTree pt = LanguageServer.Util.Find(index, doc);
    var gd = ParserDescriptionFactory.Create(doc);
    if (pt == null)
    {
        return -1;
    }
    TerminalNodeImpl q = pt as Antlr4.Runtime.Tree.TerminalNodeImpl;
    // First try the popup (classification) table, then fall back to comments.
    if (pd.PopupList.TryGetValue(q, out int tag_type))
    {
        return tag_type;
    }
    if (q.Symbol == null)
    {
        return -1;
    }
    if (pd.Comments.TryGetValue(q.Symbol, out int tag2))
    {
        return tag2;
    }
    return -1;
}
public static ParsingResults Create(Workspaces.Document document)
{
    if (document == null)
    {
        return null;
    }
    // Return the cached parse for this file, if one exists.
    string ffn = document.FullPath;
    foreach (KeyValuePair<string, ParsingResults> pd in _per_file_parser_details)
    {
        if (pd.Key == ffn)
        {
            return pd.Value;
        }
    }
    ParsingResults result = ParserDescriptionFactory.Create(document);
    _per_file_parser_details[ffn] = result;
    return result;
}
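// Hedged sketch (an addition): because of the cache above, repeated calls for
// the same document return the identical ParsingResults instance.
// "SameParseReturned" is a hypothetical name for illustration only.
public static bool SameParseReturned(Workspaces.Document document)
{
    ParsingResults first = Create(document);
    ParsingResults second = Create(document);
    return ReferenceEquals(first, second); // expected: true once cached
}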
public DocumentSymbol GetDocumentSymbol(int index, Document doc)
{
    ParsingResults pd = ParsingResultsFactory.Create(doc);
    var workspace = doc.Workspace;
    if (pd.ParseTree == null)
    {
        Compile(workspace);
    }
    Antlr4.Runtime.Tree.IParseTree pt = LanguageServer.Util.Find(index, doc);
    var gd = ParserDescriptionFactory.Create(doc);
    if (pt == null)
    {
        return default(DocumentSymbol);
    }
    TerminalNodeImpl q = pt as Antlr4.Runtime.Tree.TerminalNodeImpl;
    if (!pd.PopupList.TryGetValue(q, out int tag_type))
    {
        return null;
    }
    if (q.Symbol == null)
    {
        return null;
    }
    return new DocumentSymbol()
    {
        name = q.Symbol.Text,
        range = new Workspaces.Range(q.Symbol.StartIndex, q.Symbol.StopIndex),
        kind = tag_type
    };
}
public IEnumerable<Location> GetDefs(Document doc)
{
    List<Location> result = new List<Location>();
    ParsingResults ref_pd = ParsingResultsFactory.Create(doc);
    var workspace = doc.Workspace;
    if (ref_pd.ParseTree == null)
    {
        Compile(workspace);
    }
    foreach (KeyValuePair<TerminalNodeImpl, int> value in ref_pd.Defs)
    {
        Antlr4.Runtime.IToken sym = value.Key.Payload;
        result.Add(new Location()
        {
            Range = new Workspaces.Range(sym.StartIndex, sym.StopIndex),
            Uri = workspace.FindDocument(sym.InputStream.SourceName)
        });
    }
    return result;
}
public IEnumerable<Location> FindRefsAndDefs(int index, Document doc)
{
    List<Location> result = new List<Location>();
    IParseTree ref_pt = Util.Find(index, doc);
    if (ref_pt == null)
    {
        return result;
    }
    var workspace = doc.Workspace;
    ParsingResults ref_pd = ParsingResultsFactory.Create(doc);
    if (ref_pd.ParseTree == null)
    {
        Compile(workspace);
    }
    ref_pd.Attributes.TryGetValue(ref_pt, out IList<Symtab.CombinedScopeSymbol> list_value);
    if (list_value == null)
    {
        return result;
    }
    // Resolve the symbol under the cursor to its definition(s).
    List<Symtab.ISymbol> found_defs = null;
    Symtab.ISymbol found_ref = null;
    foreach (CombinedScopeSymbol value in list_value)
    {
        if (value == null)
        {
            continue;
        }
        Symtab.ISymbol @ref = value as Symtab.ISymbol;
        if (@ref == null || @ref.Token == null)
        {
            continue;
        }
        found_ref = @ref;
        List<Symtab.ISymbol> defs = @ref.resolve();
        if (defs == null)
        {
            continue;
        }
        found_defs = defs;
        break;
    }
    if (found_defs == null)
    {
        return result;
    }
    // Report each definition together with all of its references.
    foreach (var def in found_defs)
    {
        result.Add(new Location()
        {
            Range = new Workspaces.Range(def.Token.First().StartIndex, def.Token.First().StopIndex),
            Uri = workspace.FindDocument(def.file)
        });
        var dd = def as BaseSymbol;
        foreach (var r in dd.Refs)
        {
            result.Add(new Location()
            {
                Range = new Workspaces.Range(r.Token.First().StartIndex, r.Token.First().StopIndex),
                Uri = workspace.FindDocument(r.file)
            });
        }
    }
    return result;
}
public IList<Location> FindDefs(int index, Document doc)
{
    List<Location> result = new List<Location>();
    if (doc == null)
    {
        return result;
    }
    var workspace = doc.Workspace;
    IParseTree ref_pt = Util.Find(index, doc);
    if (ref_pt == null)
    {
        return result;
    }
    ParsingResults ref_pd = ParsingResultsFactory.Create(doc);
    if (ref_pd.ParseTree == null)
    {
        Compile(workspace);
    }
    ref_pd.Attributes.TryGetValue(ref_pt, out IList<Symtab.CombinedScopeSymbol> list_values);
    if (list_values == null)
    {
        return result;
    }
    foreach (CombinedScopeSymbol value in list_values)
    {
        if (value == null)
        {
            continue;
        }
        Symtab.ISymbol @ref = value as Symtab.ISymbol;
        if (@ref == null)
        {
            continue;
        }
        List<Symtab.ISymbol> defs = @ref.resolve();
        if (defs == null)
        {
            continue;
        }
        foreach (var def in defs)
        {
            string def_file = def.file;
            if (def_file == null)
            {
                continue;
            }
            Document def_item = workspace.FindDocument(def_file);
            if (def_item == null)
            {
                continue;
            }
            result.Add(new Location()
            {
                Range = new Workspaces.Range(def.Token.First().StartIndex, def.Token.First().StopIndex),
                Uri = def_item
            });
        }
    }
    return result;
}
public static ParsingResults Create(Workspaces.Document document)
{
    if (_parsing_results.ContainsKey(document) && _parsing_results[document] != null)
    {
        return _parsing_results[document];
    }
    ParsingResults result = null;
    if (document.ParseAs != null)
    {
        // An explicit "parse as" setting takes precedence over the file extension.
        switch (document.ParseAs)
        {
            case "antlr2": result = new Antlr2ParsingResults(document); break;
            case "antlr3": result = new Antlr3ParsingResults(document); break;
            case "antlr4": result = new Antlr4ParsingResults(document); break;
            case "bison": result = new BisonParsingResults(document); break;
            case "ebnf": result = new W3CebnfParsingResults(document); break;
            case "iso14977": result = new Iso14977ParsingResults(document); break;
            case "lbnf": result = new lbnfParsingResults(document); break;
            default: result = null; break;
        }
    }
    else if (document.FullPath.EndsWith(".ebnf"))
    {
        document.ParseAs = "ebnf";
        result = new W3CebnfParsingResults(document);
    }
    else if (document.FullPath.EndsWith(".g2"))
    {
        document.ParseAs = "antlr2";
        result = new Antlr2ParsingResults(document);
    }
    else if (document.FullPath.EndsWith(".g3"))
    {
        document.ParseAs = "antlr3";
        result = new Antlr3ParsingResults(document);
    }
    else if (document.FullPath.EndsWith(".g4"))
    {
        document.ParseAs = "antlr4";
        result = new Antlr4ParsingResults(document);
    }
    else if (document.FullPath.EndsWith(".y"))
    {
        document.ParseAs = "bison";
        result = new BisonParsingResults(document);
    }
    else if (document.FullPath.EndsWith(".iso14977"))
    {
        document.ParseAs = "iso14977";
        result = new Iso14977ParsingResults(document);
    }
    else if (document.FullPath.EndsWith(".cf"))
    {
        document.ParseAs = "lbnf";
        result = new lbnfParsingResults(document);
    }
    _parsing_results[document] = result;
    return result;
}
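// Hedged design sketch (an addition): the extension dispatch above could be
// table-driven instead. The dictionary below is hypothetical; it assumes only
// the same *ParsingResults constructors already used in Create().
private static readonly Dictionary<string, Func<Workspaces.Document, ParsingResults>> _by_extension =
    new Dictionary<string, Func<Workspaces.Document, ParsingResults>>()
    {
        { ".ebnf", d => new W3CebnfParsingResults(d) },
        { ".g2", d => new Antlr2ParsingResults(d) },
        { ".g3", d => new Antlr3ParsingResults(d) },
        { ".g4", d => new Antlr4ParsingResults(d) },
        { ".y", d => new BisonParsingResults(d) },
        { ".iso14977", d => new Iso14977ParsingResults(d) },
        { ".cf", d => new lbnfParsingResults(d) },
    };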
public QuickInfo GetQuickInfo(int index, Document doc)
{
    ParsingResults pd = ParsingResultsFactory.Create(doc);
    var workspace = doc.Workspace;
    if (pd.ParseTree == null)
    {
        Compile(workspace);
    }
    Antlr4.Runtime.Tree.IParseTree pt = LanguageServer.Util.Find(index, doc);
    var gd = ParserDescriptionFactory.Create(doc);
    if (pt == null)
    {
        return null;
    }
    pd.Attributes.TryGetValue(pt, out IList<CombinedScopeSymbol> list_value);
    if (list_value == null)
    {
        return null;
    }
    TerminalNodeImpl q = pt as Antlr4.Runtime.Tree.TerminalNodeImpl;
    Range range = new Workspaces.Range(
        new Workspaces.Index(q.Symbol.StartIndex),
        new Workspaces.Index(q.Symbol.StopIndex + 1));
    if (!pd.PopupList.TryGetValue(q, out int tag_type))
    {
        return null;
    }
    if (list_value.Count == 0)
    {
        return new QuickInfo() { Display = gd.Map[tag_type], Range = range };
    }
    if (list_value.Count == 1)
    {
        // Unambiguous symbol: show a language-specific popup if one is
        // defined for this tag type, otherwise the classification and name.
        CombinedScopeSymbol value = list_value.First();
        Symtab.ISymbol name = value as Symtab.ISymbol;
        string show = name?.Name;
        if (value is Symtab.Literal)
        {
            show = ((Symtab.Literal)value).Cleaned;
        }
        if (gd.PopUpDefinition[tag_type] != null)
        {
            Func<ParsingResults, IParseTree, string> fun = gd.PopUpDefinition[tag_type];
            string mess = fun(pd, pt);
            if (mess != null)
            {
                return new QuickInfo() { Display = mess, Range = range };
            }
        }
        string display = gd.Map[tag_type] + "\n" + show;
        return new QuickInfo() { Display = display, Range = range };
    }
    {
        // Several candidate symbols: concatenate a description of each.
        string display = "Ambiguous -- ";
        foreach (CombinedScopeSymbol value in list_value)
        {
            Symtab.ISymbol name = value as Symtab.ISymbol;
            string show = name?.Name;
            if (value is Symtab.Literal)
            {
                show = ((Symtab.Literal)value).Cleaned;
            }
            if (gd.PopUpDefinition[tag_type] != null)
            {
                Func<ParsingResults, IParseTree, string> fun = gd.PopUpDefinition[tag_type];
                string mess = fun(pd, pt);
                if (mess != null)
                {
                    display = display + mess;
                }
            }
            else
            {
                display = display + gd.Map[tag_type] + "\n" + show;
            }
        }
        return new QuickInfo() { Display = display, Range = range };
    }
}
public List<ParsingResults> Compile(Workspace workspace)
{
    try
    {
        // Parse changed files. If it's a combined grammar, there is no need
        // to read other files. If it's a parser grammar, get the lexer
        // grammar and read that. If it's a lexer grammar, read the parser
        // grammar. This has to be done intelligently: we don't want to parse
        // every file here, since that would not be useful and some files may
        // not even parse. Find all changed files, assuming the workspace
        // contains them. If a file is not in the workspace, it has not
        // changed, so it is of no interest unless it is a dependency.
        DoAgain:
        HashSet<ParsingResults> to_do = new HashSet<ParsingResults>();
        HashSet<ParsingResults> new_to_do = new HashSet<ParsingResults>();
        foreach (Document document in Workspaces.DFSContainer.DFS(workspace))
        {
            if (document.GetParseTree() != null && !document.Changed)
            {
                continue;
            }
            string file_name = document.FullPath;
            if (file_name == null)
            {
                continue;
            }
            Container parent = document.Parent;
            ParsingResults v = LanguageServer.ParserDescriptionFactory.Create(document);
            if (v == null && document.ParseAs == null)
            {
                continue;
            }
            if (new_to_do.Contains(v))
            {
                continue;
            }
            new_to_do.Add(v);
        }
        // Expand the to-do set to a fixed point over the import graph.
        for (; ;)
        {
            HashSet<ParsingResults> diff = new_to_do.Subtract(to_do);
            to_do = new_to_do;
            if (diff.Count == 0)
            {
                break;
            }
            // Do a quick bail-mode parse to determine Imports,
            // InverseImports, and workspace changes.
            foreach (ParsingResults v in diff)
            {
                v.Parse();
                v.GetGrammarBasics();
            }
            // Add all files to the workspace.
            new_to_do = new HashSet<ParsingResults>();
            foreach (KeyValuePair<string, HashSet<string>> dep in ParsingResults.InverseImports)
            {
                string name = dep.Key;
                Workspaces.Document x = workspace.FindDocument(name);
                ParsingResults v = LanguageServer.ParserDescriptionFactory.Create(x);
                new_to_do.Add(v);
            }
        }
        // Sort the grammar files based on includes to derive the symbol table.
        Digraph<ParsingResults> g = ConstructGraph(to_do);
        bool changed = true;
        for (int pass = 0; changed; pass++)
        {
            changed = false;
            foreach (ParsingResults v in g.Vertices)
            {
                int number_of_passes = v.Passes.Count;
                if (pass < number_of_passes)
                {
                    try
                    {
                        bool reset = v.Pass(pass);
                        if (reset)
                        {
                            goto DoAgain;
                        }
                    }
                    catch (Exception)
                    {
                        // Ignore a failing pass and continue with the rest.
                    }
                    changed = true;
                }
            }
        }
        foreach (ParsingResults v in g.Vertices)
        {
            v.GatherRefsDefsAndOthers();
        }
        foreach (ParsingResults v in g.Vertices)
        {
            v.GatherErrors();
        }
        return g.Vertices.ToList();
    }
    catch (Exception e)
    {
        Logger.Log.Notify(e.ToString());
    }
    return new List<ParsingResults>();
}
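// Hedged sketch (an addition): the worklist loop inside Compile() reduced to
// its generic fixed-point skeleton -- keep expanding the to-do set until a
// round adds nothing new. "FixedPoint" and "expand" are hypothetical names.
private static HashSet<T> FixedPoint<T>(HashSet<T> seed, Func<HashSet<T>, HashSet<T>> expand)
{
    HashSet<T> to_do = new HashSet<T>();
    HashSet<T> new_to_do = seed;
    for (; ;)
    {
        HashSet<T> diff = new HashSet<T>(new_to_do);
        diff.ExceptWith(to_do); // items discovered this round
        to_do = new_to_do;
        if (diff.Count == 0)
        {
            break;
        }
        new_to_do = expand(to_do);
    }
    return to_do;
}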
public Pass3Listener(ParsingResults pd)
{
    _pd = pd;
}
public TextEdit[] Reformat(Document doc)
{
    ParsingResults ref_pd = ParsingResultsFactory.Create(doc);
    string code = doc.Code;
    string corpus_location = Options.Option.GetString("CorpusLocation");
    if (corpus_location == null)
    {
        return new TextEdit[] { };
    }
    string ffn = doc.FullPath;
    if (ffn == null)
    {
        return new TextEdit[] { };
    }
    var grammar_description = LanguageServer.ParserDescriptionFactory.Create(doc);
    if (grammar_description == null)
    {
        return new TextEdit[] { };
    }
    org.antlr.codebuff.Tool.unformatted_input = code;
    try
    {
        string result = org.antlr.codebuff.Tool.Main(
            new object[]
            {
                "-g", grammar_description.Name,
                "-lexer", grammar_description.Lexer,
                "-parser", grammar_description.Parser,
                "-rule", grammar_description.StartRule,
                "-files", grammar_description.FileExtension,
                "-corpus", corpus_location,
                "-inoutstring", ""
            });
        // Diff the original against the formatted output and convert each
        // patch into a TextEdit.
        List<TextEdit> edits = new List<TextEdit>();
        Diff_match_patch diff = new Diff_match_patch();
        List<Diff> diffs = diff.Diff_main(code, result);
        List<Patch> patch = diff.Patch_make(diffs);
        // "delta" tracks how far earlier patches have shifted the text, so
        // that patch offsets can be mapped back to the original document.
        int delta = 0;
        foreach (Patch p in patch)
        {
            int start = p.start1 - delta;
            int offset = 0;
            foreach (Diff ed in p.diffs)
            {
                if (ed.operation == Operation.EQUAL)
                {
                    offset = offset + ed.text.Length;
                }
                else if (ed.operation == Operation.DELETE)
                {
                    int len = ed.text.Length;
                    edits.Add(new TextEdit()
                    {
                        range = new Workspaces.Range(
                            new Workspaces.Index(start + offset),
                            new Workspaces.Index(start + offset + len)),
                        NewText = ""
                    });
                    offset = offset + len;
                }
                else if (ed.operation == Operation.INSERT)
                {
                    edits.Add(new TextEdit()
                    {
                        range = new Workspaces.Range(
                            new Workspaces.Index(start + offset),
                            new Workspaces.Index(start + offset)),
                        NewText = ed.text
                    });
                }
            }
            delta = delta + (p.length2 - p.length1);
        }
        return edits.ToArray();
    }
    catch (Exception)
    {
        return new TextEdit[] { };
    }
}
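// Hedged usage sketch (an addition): applying the TextEdit array produced by
// Reformat() to a source string, processed last-to-first so that earlier
// offsets remain valid. "ApplyEdits" is a hypothetical helper; it assumes the
// Range/Index accessors already used above.
public static string ApplyEdits(string code, TextEdit[] edits)
{
    foreach (TextEdit e in edits.OrderByDescending(x => x.range.Start.Value))
    {
        int start = e.range.Start.Value;
        int length = e.range.End.Value - start;
        code = code.Remove(start, length).Insert(start, e.NewText);
    }
    return code;
}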
public static List<DiagnosticInfo> PerformAnalysis(Document document)
{
    List<DiagnosticInfo> result = new List<DiagnosticInfo>();
    // Check that the initial file is a grammar.
    ParsingResults pd_parser = ParsingResultsFactory.Create(document) as ParsingResults;
    if (pd_parser == null)
    {
        throw new LanguageServerException("A grammar file is not selected. Please select one first.");
    }
    Transform.ExtractGrammarType egt = new Transform.ExtractGrammarType();
    ParseTreeWalker.Default.Walk(egt, pd_parser.ParseTree);
    bool is_grammar = egt.Type == Transform.ExtractGrammarType.GrammarType.Parser
        || egt.Type == Transform.ExtractGrammarType.GrammarType.Combined
        || egt.Type == Transform.ExtractGrammarType.GrammarType.Lexer;
    if (!is_grammar)
    {
        throw new LanguageServerException("A grammar file is not selected. Please select one first.");
    }
    // Find all other grammars by following dependencies (import, vocab,
    // file names) in both directions until the set stops growing.
    HashSet<string> read_files = new HashSet<string> { document.FullPath };
    Dictionary<Workspaces.Document, List<TerminalNodeImpl>> every_damn_literal =
        new Dictionary<Workspaces.Document, List<TerminalNodeImpl>>();
    for (; ;)
    {
        int before_count = read_files.Count;
        foreach (string f in read_files)
        {
            List<string> additional = ParsingResults.InverseImports
                .Where(t => t.Value.Contains(f))
                .Select(t => t.Key)
                .ToList();
            read_files = read_files.Union(additional).ToHashSet();
        }
        foreach (string f in read_files)
        {
            var additional = ParsingResults.InverseImports
                .Where(t => t.Key == f)
                .Select(t => t.Value);
            foreach (var t in additional)
            {
                read_files = read_files.Union(t).ToHashSet();
            }
        }
        int after_count = read_files.Count;
        if (after_count == before_count)
        {
            break;
        }
    }
    // Histogram the classification of every terminal node.
    if (pd_parser.AllNodes != null)
    {
        int[] histogram = new int[pd_parser.Map.Length];
        IEnumerable<IParseTree> it = pd_parser.AllNodes.Where(n => n is TerminalNodeImpl);
        foreach (var n in it)
        {
            var t = n as TerminalNodeImpl;
            try
            {
                int i = pd_parser.Classify(pd_parser, pd_parser.Attributes, t);
                if (i >= 0)
                {
                    histogram[i]++;
                }
            }
            catch (Exception)
            {
            }
        }
        for (int j = 0; j < histogram.Length; ++j)
        {
            result.Add(new DiagnosticInfo()
            {
                Document = document.FullPath,
                Severify = DiagnosticInfo.Severity.Info,
                Start = 0,
                End = 0,
                Message = "Parser type " + j + " " + histogram[j]
            });
        }
    }
    // Check for useless lexer tokens.
    List<string> unused = new List<string>();
    var pt = pd_parser.ParseTree;
    var l1 = TreeEdits.FindTopDown(pt, (in IParseTree t, out bool c) =>
    {
        c = true;
        if (t is ANTLRv4Parser.LexerRuleSpecContext)
        {
            c = false;
            return t;
        }
        return null;
    }).ToList();
public IEnumerable<Info> Get(Document doc)
{
    try
    {
        ParsingResults pd = ParsingResultsFactory.Create(doc);
        var workspace = doc.Workspace;
        if (pd.ParseTree == null)
        {
            Compile(workspace);
        }
        List<Info> combined = new List<Info>();
        foreach (KeyValuePair<Antlr4.Runtime.IToken, int> p in pd.ColorizedList)
        {
            if (p.Key == null)
            {
                continue;
            }
            var sym = p.Key;
            var st = sym.StartIndex;
            var en = sym.StopIndex + 1;
            // Create multiple "info" records for multi-line tokens, one per line.
            var (ls, cs) = new LanguageServer.Module().GetLineColumn(st, doc);
            var (le, ce) = new LanguageServer.Module().GetLineColumn(en, doc);
            if (ls == le)
            {
                combined.Add(new Info() { start = st, end = en - 1, kind = p.Value });
            }
            else
            {
                var text = sym.Text;
                int start_region = st;
                for (int cur_index = 0; cur_index < text.Length;)
                {
                    if (text[cur_index] == '\n' || text[cur_index] == '\r')
                    {
                        // Treat "\r\n" as one line break, then emit an Info
                        // for the region up to and including the break.
                        if (text[cur_index] == '\r'
                            && (cur_index + 1 < text.Length)
                            && text[cur_index + 1] == '\n')
                        {
                            cur_index++;
                        }
                        cur_index++;
                        combined.Add(new Info()
                        {
                            start = start_region,
                            end = st + cur_index - 1,
                            kind = p.Value
                        });
                        start_region = st + cur_index;
                    }
                    else
                    {
                        cur_index++;
                    }
                }
                if (start_region != en)
                {
                    combined.Add(new Info() { start = start_region, end = en - 1, kind = p.Value });
                }
            }
        }
        // Sort the list.
        return combined.OrderBy(t => t.start).ThenBy(t => t.end);
    }
    catch (Exception)
    {
        // On any failure, fall through and return an empty result.
    }
    return new List<Info>();
}
public abstract void Parse(ParsingResults pd, bool bail);
public override void Parse(ParsingResults pd, bool bail)
{
    string ffn = pd.FullFileName;
    string code = pd.Code;
    if (ffn == null || code == null)
    {
        return;
    }
    this.QuietAfter = pd.QuietAfter;
    IParseTree pt = null;
    // Set up Antlr to parse the input grammar.
    byte[] byteArray = Encoding.UTF8.GetBytes(code);
    AntlrInputStream ais = new AntlrInputStream(
        new StreamReader(new MemoryStream(byteArray)).ReadToEnd())
    {
        name = ffn
    };
    var lexer = new W3CebnfLexer(ais);
    CommonTokenStream cts = new CommonTokenStream(lexer);
    var parser = new W3CebnfParser(cts);
    lexer.RemoveErrorListeners();
    var lexer_error_listener = new ErrorListener<int>(parser, lexer, pd.QuietAfter);
    lexer.AddErrorListener(lexer_error_listener);
    parser.RemoveErrorListeners();
    var parser_error_listener = new ErrorListener<IToken>(parser, lexer, pd.QuietAfter);
    parser.AddErrorListener(parser_error_listener);
    // In bail mode, stop at the first syntax error.
    BailErrorHandler bail_error_handler = null;
    if (bail)
    {
        bail_error_handler = new BailErrorHandler();
        parser.ErrorHandler = bail_error_handler;
    }
    try
    {
        pt = parser.prods();
    }
    catch (Exception)
    {
        // Parsing error.
    }
    //StringBuilder sb = new StringBuilder();
    //TreeSerializer.ParenthesizedAST(pt, sb, "", cts);
    //string fn = System.IO.Path.GetFileName(ffn);
    //fn = "c:\\temp\\" + fn;
    //System.IO.File.WriteAllText(fn, sb.ToString());
    if (parser_error_listener.had_error
        || lexer_error_listener.had_error
        || (bail_error_handler != null && bail_error_handler.had_error))
    {
        System.Console.Error.WriteLine("Error in parse of " + ffn);
    }
    else
    {
        System.Console.Error.WriteLine("Parse completed of " + ffn);
    }
    pd.TokStream = cts;
    pd.Parser = parser;
    pd.Lexer = lexer;
    pd.ParseTree = pt;
    // Walk the tree and attach this ParsingResults to every attributed node.
    Stack<IParseTree> stack = new Stack<IParseTree>();
    stack.Push(pt);
    while (stack.Any())
    {
        var x = stack.Pop();
        if (x is TerminalNodeImpl leaf)
        {
            // Leaves have no children and need no attribution.
        }
        else
        {
            if (x is AttributedParseTreeNode y)
            {
                y.ParserDetails = pd;
            }
            for (int i = 0; i < x.ChildCount; ++i)
            {
                var c = x.GetChild(i);
                if (c != null)
                {
                    stack.Push(c);
                }
            }
        }
    }
}
public static void ShowCycles(int pos, Document document)
{
    Dictionary<string, string> result = new Dictionary<string, string>();
    // Check that the initial file is a grammar.
    ParsingResults pd_parser = ParsingResultsFactory.Create(document) as ParsingResults;
    if (pd_parser == null)
    {
        throw new LanguageServerException("A grammar file is not selected. Please select one first.");
    }
    Transform.ExtractGrammarType egt = new Transform.ExtractGrammarType();
    ParseTreeWalker.Default.Walk(egt, pd_parser.ParseTree);
    bool is_grammar = egt.Type == Transform.ExtractGrammarType.GrammarType.Parser
        || egt.Type == Transform.ExtractGrammarType.GrammarType.Combined
        || egt.Type == Transform.ExtractGrammarType.GrammarType.Lexer;
    if (!is_grammar)
    {
        throw new LanguageServerException("A grammar file is not selected. Please select one first.");
    }
    // Find all other grammars by following dependencies (import, vocab,
    // file names) in both directions until the set stops growing.
    HashSet<string> read_files = new HashSet<string> { document.FullPath };
    Dictionary<Workspaces.Document, List<TerminalNodeImpl>> every_damn_literal =
        new Dictionary<Workspaces.Document, List<TerminalNodeImpl>>();
    for (; ;)
    {
        int before_count = read_files.Count;
        foreach (string f in read_files)
        {
            List<string> additional = ParsingResults.InverseImports
                .Where(t => t.Value.Contains(f))
                .Select(t => t.Key)
                .ToList();
            read_files = read_files.Union(additional).ToHashSet();
        }
        foreach (string f in read_files)
        {
            var additional = ParsingResults.InverseImports
                .Where(t => t.Key == f)
                .Select(t => t.Value);
            foreach (var t in additional)
            {
                read_files = read_files.Union(t).ToHashSet();
            }
        }
        int after_count = read_files.Count;
        if (after_count == before_count)
        {
            break;
        }
    }
    // Construct a graph of symbol usage among parser rules.
    Transform.TableOfRules table = new Transform.TableOfRules(pd_parser, document);
    table.ReadRules();
    table.FindPartitions();
    table.FindStartRules();
    Digraph<string> graph = new Digraph<string>();
    foreach (Transform.TableOfRules.Row r in table.rules)
    {
        if (!r.is_parser_rule)
        {
            continue;
        }
        graph.AddVertex(r.LHS);
    }
    foreach (Transform.TableOfRules.Row r in table.rules)
    {
        if (!r.is_parser_rule)
        {
            continue;
        }
        foreach (string rhs in r.RHS)
        {
            Transform.TableOfRules.Row sym = table.rules.Where(t => t.LHS == rhs).FirstOrDefault();
            if (!sym.is_parser_rule)
            {
                continue;
            }
            graph.AddEdge(new DirectedEdge<string>(r.LHS, rhs));
        }
    }
    List<string> starts = new List<string>();
    List<string> parser_lhs_rules = new List<string>();
    foreach (Transform.TableOfRules.Row r in table.rules)
    {
        if (r.is_parser_rule)
        {
            parser_lhs_rules.Add(r.LHS);
            if (r.is_start)
            {
                starts.Add(r.LHS);
            }
        }
    }
    // Find the rule whose source span contains the cursor position.
    IParseTree rule = pd_parser.AllNodes.Where(n =>
    {
        if (!(n is ANTLRv4Parser.ParserRuleSpecContext || n is ANTLRv4Parser.LexerRuleSpecContext))
        {
            return false;
        }
        Interval source_interval = n.SourceInterval;
        IToken ta = pd_parser.TokStream.Get(source_interval.a);
        IToken tb = pd_parser.TokStream.Get(source_interval.b);
        var start = ta.StartIndex;
        var stop = tb.StopIndex + 1;
        return start <= pos && pos < stop;
    }).FirstOrDefault();
    var k = (ANTLRv4Parser.ParserRuleSpecContext)rule;
    // Compute strongly connected components; any SCC with more than one
    // member is a cycle.
    var tarjan = new TarjanSCC<string, DirectedEdge<string>>(graph);
    List<string> ordered = new List<string>();
    var sccs = tarjan.Compute();
    StringBuilder sb = new StringBuilder();
    sb.AppendLine("Cycles in " + document.FullPath);
    var done = new List<IEnumerable<string>>();
    foreach (var scc in sccs)
    {
        if (scc.Value.Count() <= 1)
        {
            continue;
        }
        if (!done.Contains(scc.Value))
        {
            foreach (var s in scc.Value)
            {
                sb.Append(" ");
                sb.Append(s);
            }
            sb.AppendLine();
            sb.AppendLine();
            done.Add(scc.Value);
        }
    }
    //var scc = sccs[k.RULE_REF().ToString()];
    //foreach (var v in scc)
    //{
    //    ordered.Add(v);
    //}
}