public static Document CheckDoc(string path)
{
    string file_name = path;
    Document document = Workspaces.Workspace.Instance.FindDocument(file_name);
    if (document == null)
    {
        document = new Workspaces.Document(file_name);
        try
        {
            // Read the entire file into the document's code buffer.
            using (StreamReader sr = new StreamReader(file_name))
            {
                document.Code = sr.ReadToEnd();
            }
        }
        catch (IOException)
        {
            // Ignore unreadable files; the document is still registered, just empty.
        }
        Project project = Workspaces.Workspace.Instance.FindProject("Misc");
        if (project == null)
        {
            project = new Project("Misc", "Misc", "Misc");
            Workspaces.Workspace.Instance.AddChild(project);
        }
        project.AddDocument(document);
    }
    document.Changed = true;
    _ = ParsingResultsFactory.Create(document);
    var workspace = document.Workspace;
    _ = new LanguageServer.Module().Compile(workspace);
    return document;
}
Document CheckDoc(System.Uri uri)
{
    // The URI may arrive percent-encoded; decode it before taking the local path.
    var decoded = HttpUtility.UrlDecode(uri.AbsoluteUri);
    var file_name = new Uri(decoded).LocalPath;
    var document = _workspace.FindDocument(file_name);
    if (document == null)
    {
        document = new Workspaces.Document(file_name, file_name);
        try
        {
            // Read the entire file into the document's code buffer.
            using (StreamReader sr = new StreamReader(file_name))
            {
                document.Code = sr.ReadToEnd();
            }
        }
        catch (IOException)
        {
            // Ignore unreadable files; the document is still registered, just empty.
        }
        var project = _workspace.FindProject("Misc");
        if (project == null)
        {
            project = new Project("Misc", "Misc", "Misc");
            _workspace.AddChild(project);
        }
        project.AddDocument(document);
        document.Changed = true;
        _ = ParserDetailsFactory.Create(document);
        _ = LanguageServer.Module.Compile();
    }
    return document;
}
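// Not part of the original source: a minimal caller sketch for the string
// overload of CheckDoc above. "Test.g4" and CheckDocExample are hypothetical;
// the Uri overload behaves the same after URL-decoding a file:// URI.
public static void CheckDocExample()
{
    Document doc = CheckDoc("Test.g4");
    // The document is now registered under the "Misc" project,
    // read from disk, and compiled with the rest of the workspace.
    System.Console.WriteLine(doc.FullPath);
}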
public Document CreateDoc(string path, string code)
{
    string file_name = path;
    Document document = _repl._workspace.FindDocument(file_name);
    if (document == null)
    {
        document = new Workspaces.Document(file_name);
        Project project = _repl._workspace.FindProject("Misc");
        if (project == null)
        {
            project = new Project("Misc", "Misc", "Misc");
            _repl._workspace.AddChild(project);
        }
        project.AddDocument(document);
    }
    document.Code = code;
    return document;
}
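// Not part of the original source: a usage sketch for CreateDoc with a
// hypothetical path and grammar text. Unlike CheckDoc, the source text is
// supplied directly rather than read from disk, and no compile is triggered.
public void CreateDocExample()
{
    Document doc = CreateDoc("InMemory.g4", "grammar InMemory; start : 'a' ;");
    System.Console.WriteLine(doc.Code);
}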
public static Document CreateStringDocument(string input)
{
    string file_name = "Dummy" + random_number + ".g4";
    Document document = Workspaces.Workspace.Instance.FindDocument(file_name);
    if (document == null)
    {
        document = new Workspaces.Document(file_name);
        document.Code = input;
        Project project = Workspaces.Workspace.Instance.FindProject("Misc");
        if (project == null)
        {
            project = new Project("Misc", "Misc", "Misc");
            Workspaces.Workspace.Instance.AddChild(project);
        }
        project.AddDocument(document);
    }
    document.Changed = true;
    _ = ParsingResultsFactory.Create(document);
    var workspace = document.Workspace;
    _ = new LanguageServer.Module().Compile(workspace);
    return document;
}
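// Not part of the original source: a usage sketch for CreateStringDocument
// with a hypothetical grammar string. The caller supplies no path; the
// document is registered under a generated "Dummy<N>.g4" name and compiled
// immediately.
public static void CreateStringDocumentExample()
{
    Document doc = CreateStringDocument("grammar Dummy; start : 'a' ;");
    System.Console.WriteLine(doc.FullPath);
}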
public W3CebnfParsingResults(Document item) : base(item)
{
    Passes.Add(() =>
    {
        // Grow the workspace with any documents listed in the inverse-import
        // table; returning true forces the compile loop to restart.
        int before_count = 0;
        if (!ParsingResults.InverseImports.ContainsKey(this.FullFileName))
        {
            ParsingResults.InverseImports.Add(this.FullFileName, new HashSet<string>());
        }
        foreach (KeyValuePair<string, HashSet<string>> x in ParsingResults.InverseImports)
        {
            before_count += 1 + x.Value.Count;
        }
        if (ParseTree == null)
        {
            return false;
        }
        int after_count = 0;
        foreach (KeyValuePair<string, HashSet<string>> dep in ParsingResults.InverseImports)
        {
            string name = dep.Key;
            Workspaces.Document x = item.Workspace.FindDocument(name);
            if (x == null)
            {
                // Add document.
                Workspaces.Container proj = Item.Parent;
                Workspaces.Document new_doc = new Workspaces.Document(name);
                proj.AddChild(new_doc);
                after_count++;
            }
            after_count += 1 + dep.Value.Count;
        }
        return before_count != after_count;
    });
    Passes.Add(() =>
    {
        // The workspace is completely loaded. Create scopes for all files in
        // the workspace if they don't already exist.
        foreach (KeyValuePair<string, HashSet<string>> dep in InverseImports)
        {
            string name = dep.Key;
            _scopes.TryGetValue(name, out IScope file_scope);
            if (file_scope != null)
            {
                continue;
            }
            _scopes[name] = new FileScope(name, null);
        }
        // Set up search path scopes for the Imports relationship.
        IScope root = _scopes[FullFileName];
        foreach (string dep in Imports)
        {
            // Don't add if we already have this search path.
            IScope dep_scope = _scopes[dep];
            bool found = false;
            foreach (IScope scope in root.NestedScopes)
            {
                if (scope is SearchPathScope spc && spc.NestedScopes.First() == dep_scope)
                {
                    found = true;
                    break;
                }
            }
            if (!found)
            {
                SearchPathScope import = new SearchPathScope(root);
                import.nest(dep_scope);
                root.nest(import);
            }
        }
        root.empty();
        RootScope = root;
        return false;
    });
    Passes.Add(() =>
    {
        if (ParseTree == null)
        {
            return false;
        }
        ParseTreeWalker.Default.Walk(new Pass2Listener(this), ParseTree);
        return false;
    });
    Passes.Add(() =>
    {
        if (ParseTree == null)
        {
            return false;
        }
        ParseTreeWalker.Default.Walk(new Pass3Listener(this), ParseTree);
        return false;
    });
}
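// Not part of the original source: a simplified sketch of how the Passes
// list above is consumed (the real driver is the Compile method below).
// Each pass runs in order and returns true to demand a full restart, e.g.
// when the first pass pulls new imported documents into the workspace.
static bool RunAllPasses(ParsingResults v)
{
    for (int pass = 0; pass < v.Passes.Count; ++pass)
    {
        if (v.Pass(pass))
        {
            return true; // caller should rebuild its graph and start over
        }
    }
    return false;
}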
public static List<ParserDetails> Compile()
{
    try
    {
        Workspace ws = Workspaces.Workspace.Instance;
        // Collect all changed files.
        HashSet<ParserDetails> to_do = new HashSet<ParserDetails>();
    DoAgain:
        // Walk the workspace and pick up every grammar file, plus any sibling
        // grammar files in the same directory.
        foreach (Document document in Workspaces.DFSContainer.DFS(ws))
        {
            string file_name = document.FullPath;
            if (file_name == null)
            {
                continue;
            }
            Container parent = document.Parent;
            IGrammarDescription gd = LanguageServer.GrammarDescriptionFactory.Create(file_name);
            if (gd == null)
            {
                continue;
            }
            // Scan the document's directory for files with the same extension.
            string extension = System.IO.Path.GetExtension(file_name);
            string directory = System.IO.Path.GetDirectoryName(file_name);
            foreach (string file in System.IO.Directory.GetFiles(directory))
            {
                if (System.IO.Path.GetExtension(file) != extension)
                {
                    continue;
                }
                IGrammarDescription g2 = LanguageServer.GrammarDescriptionFactory.Create(file);
                if (g2 == null)
                {
                    continue;
                }
                Document x = Workspaces.Workspace.Instance.FindDocument(file);
                if (x == null)
                {
                    // The sibling grammar isn't in the workspace yet; add it.
                    Container proj = parent;
                    Document new_doc = new Workspaces.Document(file);
                    proj.AddChild(new_doc);
                }
                ParserDetails p2 = ParserDetailsFactory.Create(document);
                if (!p2.Changed)
                {
                    continue;
                }
                to_do.Add(p2);
            }
        }
        foreach (Document document in Workspaces.DFSContainer.DFS(ws))
        {
            string file_name = document.FullPath;
            if (file_name == null)
            {
                continue;
            }
            IGrammarDescription gd = LanguageServer.GrammarDescriptionFactory.Create(file_name);
            if (gd == null)
            {
                continue;
            }
            // file_name can be a URI, so an existence check doesn't make sense:
            // if (!System.IO.File.Exists(file_name)) continue;
            ParserDetails pd = ParserDetailsFactory.Create(document);
            if (!pd.Changed)
            {
                continue;
            }
            to_do.Add(pd);
        }
        // Order the work by import dependencies, then parse everything.
        Digraph<ParserDetails> g = ConstructGraph(to_do);
        foreach (ParserDetails v in g.Vertices)
        {
            v.Item.Changed = true; // Force.
            v.Parse();
        }
        // Run the per-file passes to a fixed point; a pass returning true
        // means the workspace changed and the whole computation must restart.
        bool changed = true;
        for (int pass = 0; changed; pass++)
        {
            changed = false;
            foreach (ParserDetails v in g.Vertices)
            {
                int number_of_passes = v.Passes.Count;
                if (pass < number_of_passes)
                {
                    bool reset = v.Pass(pass);
                    if (reset)
                    {
                        goto DoAgain;
                    }
                    changed = true;
                }
            }
        }
        foreach (ParserDetails v in g.Vertices)
        {
            v.GatherDefs();
        }
        foreach (ParserDetails v in g.Vertices)
        {
            v.GatherRefs();
        }
        foreach (ParserDetails v in g.Vertices)
        {
            v.GatherErrors();
        }
        return g.Vertices.ToList();
    }
    catch (Exception e)
    {
        Logger.Log.Notify(e.ToString());
    }
    return new List<ParserDetails>();
}
public List<ParsingResults> Compile(Workspace workspace)
{
    try
    {
        // Parse changed files. If it's a combined grammar, there is no need to
        // read other files. If it's a parser grammar, get the lexer grammar and
        // read that. If it's a lexer grammar, read the parser grammar.
        // This has to be done intelligently. We don't want to parse every damn
        // file here; it's not going to be useful, and some files may not even
        // work. Find all changed files, assuming that the "workspace" contains
        // them. If a file is not in the workspace already, then it has not
        // changed, so we are not interested in it, unless it is a dependency.
    DoAgain:
        HashSet<ParsingResults> to_do = new HashSet<ParsingResults>();
        HashSet<ParsingResults> new_to_do = new HashSet<ParsingResults>();
        foreach (Document document in Workspaces.DFSContainer.DFS(workspace))
        {
            if (document.GetParseTree() != null && !document.Changed)
            {
                continue;
            }
            string file_name = document.FullPath;
            if (file_name == null)
            {
                continue;
            }
            ParsingResults v = LanguageServer.ParserDescriptionFactory.Create(document);
            if (v == null && document.ParseAs == null)
            {
                continue;
            }
            if (new_to_do.Contains(v))
            {
                continue;
            }
            new_to_do.Add(v);
        }
        for (;;)
        {
            // Iterate to a fixed point: each quick parse can discover new
            // dependencies that must be pulled into the workspace.
            HashSet<ParsingResults> diff = new_to_do.Subtract(to_do);
            to_do = new_to_do;
            if (diff.Count == 0)
            {
                break;
            }
            // Do a quick bail-mode parse to determine Imports, InverseImports,
            // and workspace changes.
            foreach (ParsingResults v in diff)
            {
                v.Parse();
                v.GetGrammarBasics();
            }
            // Add all files to the workspace.
            new_to_do = new HashSet<ParsingResults>();
            foreach (KeyValuePair<string, HashSet<string>> dep in ParsingResults.InverseImports)
            {
                string name = dep.Key;
                Workspaces.Document x = workspace.FindDocument(name);
                ParsingResults v = LanguageServer.ParserDescriptionFactory.Create(x);
                new_to_do.Add(v);
            }
        }
        // Sort the grammar files based on includes to derive the symbol table.
        Digraph<ParsingResults> g = ConstructGraph(to_do);
        bool changed = true;
        for (int pass = 0; changed; pass++)
        {
            changed = false;
            foreach (ParsingResults v in g.Vertices)
            {
                int number_of_passes = v.Passes.Count;
                if (pass < number_of_passes)
                {
                    try
                    {
                        bool reset = v.Pass(pass);
                        if (reset)
                        {
                            goto DoAgain;
                        }
                    }
                    catch (Exception)
                    {
                        // Ignore a failing pass and keep going.
                    }
                    changed = true;
                }
            }
        }
        foreach (ParsingResults v in g.Vertices)
        {
            v.GatherRefsDefsAndOthers();
        }
        foreach (ParsingResults v in g.Vertices)
        {
            v.GatherErrors();
        }
        return g.Vertices.ToList();
    }
    catch (Exception e)
    {
        Logger.Log.Notify(e.ToString());
    }
    return new List<ParsingResults>();
}
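// Not part of the original source: an end-to-end sketch mirroring the calls
// already made in CheckDoc above. "Test.g4" is a hypothetical grammar file.
public static void CompileExample()
{
    Document doc = CheckDoc("Test.g4");
    List<ParsingResults> results = new LanguageServer.Module().Compile(doc.Workspace);
    foreach (ParsingResults r in results)
    {
        System.Console.WriteLine(r.FullFileName);
    }
}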