public void Parse(ModuleContainer module)
{
    bool tokenize = module.Compiler.Settings.TokenizeOnly;

    // Get source files
    List<SourceFile> sources = module.Compiler.SourceFiles;
    Location.Initialize(sources);

    // Create the session
    ParserSession session = new ParserSession {
        UseJayGlobalArrays = true,
        LocatedTokens = new LocatedToken[15000],
    };

    for (int i = 0; i < sources.Count; ++i) {
        if (tokenize) {
            // Only tokenize the file
            TokenizeFile(sources[i], module, session);
        } else {
            // Parse the file
            Parse(sources[i], module, session, Report);
        }
    }
}
static void Parse(string file)
{
    Stopwatch st = new Stopwatch();
    st.Start();

    SeekableStreamReader ssr = new SeekableStreamReader(File.OpenRead(file), Encoding.UTF8);
    SourceFile sf = new SourceFile(Path.GetFileName(file), file, 1);
    ModuleContext mctx = new ModuleContext(new Context.CompilerContext(new CompilerSettings()));
    CompilationSourceFile csf = new CompilationSourceFile(mctx, sf);
    ParserSession ps = new ParserSession();

    // Tokenizer cs = new Tokenizer(ssr, csf, ps, mctx.Compiler.Report);
    VSharpParser vp = new VSharpParser(ssr, csf, csf.Compiler.Report, ps, mctx);
    vp.parse();

    // Resolve scope
    IProjectContent pc = new VSharpProjectContent();
    pc = pc.AddOrUpdateFiles(csf);
    pc = pc.AddAssemblyReferences(MinimalCorlib.Instance);
    var c = pc.CreateCompilation();
    ResolveContext rc = new ResolveContext(c, csf.Compiler.Report);
    csf.DoResolve(rc);

    st.Stop();
    Console.WriteLine(st.Elapsed);
}
public static void Parse(SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report)
{
    var file = new CompilationSourceFile(module, sourceFile);
    module.AddTypeContainer(file);

    var parser = new CSharpParser(reader, file, report, session);
    parser.parse();
}
public static void GenerateEmbedClasses(ModuleContainer module, ParserSession session, Report report)
{
    List<EmbedData> embeds = new List<EmbedData>();
    FindEmbedClasses(module, module, embeds);
    if (embeds.Count == 0) {
        return;
    }

    var os = new StringWriter();
    os.Write(@"
// Generated embed loader classes
");

    foreach (var e in embeds) {
        var loc = e._field.Location;
        e._field.Initializer = new TypeOf(new MemberAccess(new SimpleName("_embed_loaders", loc), e._className), loc);

        os.Write(@"
namespace _embed_loaders
{{
    internal class {1} : PlayScript.EmbedLoader
    {{
        public {1}() : base({2}, {3}, {4}, {5}, {6}) {{ }}
    }}
}}
", PsConsts.PsRootNamespace, e._className, e.source, e.mimeType, e.embedAsCFF, e.fontFamily, e.symbol);
    }

    string fileStr = os.ToString();
    var path = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.IO.Path.GetFullPath(module.Compiler.Settings.OutputFile)), "embed.g.cs");
    System.IO.File.WriteAllText(path, fileStr);

    byte[] byteArray = Encoding.ASCII.GetBytes(fileStr);
    var input = new MemoryStream(byteArray, false);
    var reader = new SeekableStreamReader(input, System.Text.Encoding.UTF8);

    SourceFile file = new SourceFile(path, path, 0);
    file.FileType = SourceFileType.CSharp;
    Driver.Parse(reader, file, module, session, report);
}
private void DoParse(SourceFile file, ModuleContainer module, ParserSession session, Report report, SeekableStreamReader reader)
{
    Parse(reader, file, module, session, report);

    if (!_ctx.Settings.GenerateDebugInfo || report.Errors != 0 || file.HasChecksum) {
        return;
    }

    reader.Stream.Position = 0;
    var checksum = session.GetChecksumAlgorithm();
    file.SetChecksum(checksum.ComputeHash(reader.Stream));
}
public static void GenerateCode(ModuleContainer module, ParserSession session, Report report)
{
    GenerateDynamicPartialClasses(module, session, report);
    if (report.Errors > 0) {
        return;
    }

    GenerateEmbedClasses(module, session, report);
    if (report.Errors > 0) {
        return;
    }
}
public void Parse(SourceFile source, ModuleContainer module, ParserSession session, Report report)
{
    Stream input = null;
    try {
        // Get the stream
        input = source.GetDataStream();
    } catch {
        // Generate an error
        report.Error(2001, "Failed to open file '{0}' for reading", source.Name);
        return;
    }

    // Check for the 'MZ' binary header
    if (input.ReadByte() == 77 && input.ReadByte() == 90) {
        report.Error(2015, "Failed to open file '{0}' for reading because it is a binary file. A text file was expected", source.Name);
        input.Close();
        return;
    }

    // Back to start
    input.Position = 0;

    // Create a seekable stream
    SeekableStreamReader reader = new SeekableStreamReader(input, context.Settings.Encoding, session.StreamReaderBuffer);

    // Parse the source
    Parse(reader, source, module, session, report);

    if (context.Settings.GenerateDebugInfo && report.Errors == 0 && !source.HasChecksum) {
        // Back to start
        input.Position = 0;

        // Get the session checksum
        MD5 checksum = session.GetChecksumAlgorithm();

        // Apply the checksum
        source.SetChecksum(checksum.ComputeHash(input));
    }

    // Dispose of streams
    reader.Dispose();
    input.Close();
}
public void Parse(SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    Stream input = null;
    SeekableStreamReader reader = null;

    try {
        if (file.GetInputStream != null) {
            reader = file.GetInputStream(file);
            if (reader == null) {
                throw new FileNotFoundException("Delegate returned null", file.Name);
            }
        } else {
            input = File.OpenRead(file.Name);
        }
    } catch {
        report.Error(2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    if (reader == null) {
        using (input) {
            // Check 'MZ' header
            if (input.ReadByte() == 77 && input.ReadByte() == 90) {
                report.Error(2015, "Source file `{0}' is a binary file and not a text file", file.Name);
                return;
            }

            input.Position = 0;
            reader = new SeekableStreamReader(input, _ctx.Settings.Encoding, session.StreamReaderBuffer);
            DoParse(file, module, session, report, reader);
        }
    } else {
        DoParse(file, module, session, report, reader);
    }
}
public void Parse(ModuleContainer module)
{
    // sources: .pdb files (symbol files for the compiler)
    var sources = module.Compiler.SourceFiles;
    Location.Initialize(sources);

    ParserSession session = new ParserSession {
        UseJayGlobalArrays = true,
    };

    for (int i = 0; i < sources.Count; ++i) {
        Parse(sources[i], module, session);
    }
}
// Methods
public void TokenizeFile(SourceFile source, ModuleContainer module, ParserSession session)
{
    Stream input = null;
    try {
        // Get the stream
        input = source.GetDataStream();
    } catch {
        // Generate an error
        Report.Error(2001, "Failed to open file '{0}' for reading", source.Name);
        return;
    }

    // Manage the stream correctly
    using (input) {
        // Create a seekable stream
        SeekableStreamReader reader = new SeekableStreamReader(input, context.Settings.Encoding);

        // Create compilation source
        CompilationSourceFile file = new CompilationSourceFile(module, source);

        // Create a token lexer
        Tokenizer lexer = new Tokenizer(reader, file, session, context.Report);

        int currentToken = 0;
        int tokenCount = 0;
        int errorCount = 0;

        while ((currentToken = lexer.token()) != Token.EOF) {
            // Increment the count
            tokenCount++;

            // Check for error token
            if (currentToken == Token.ERROR) {
                errorCount++;
            }
        }
    }
}
private void DoTokenize(SourceFile sourceFile, ModuleContainer module, ParserSession session, SeekableStreamReader reader)
{
    var file = new CompilationSourceFile(module, sourceFile);
    var lexer = new Tokenizer(reader, file, session, _ctx.Report);

    int token, tokens = 0, errors = 0;
    while ((token = lexer.token()) != Token.EOF) {
        tokens++;
        if (token == Token.ERROR) {
            errors++;
        }
    }

    Console.WriteLine("Tokenized: " + tokens + " found " + errors + " errors");
}
public void Parse(SourceFile file, ModuleContainer module, ParserSession session)
{
    Stream input;
    try {
        input = file.GetDataStream();
    } catch {
        Debug.Log(string.Format("Source file `{0}' could not be found", file.Name));
        return;
    }

    SeekableStreamReader reader = new SeekableStreamReader(input, compilerContext.Settings.Encoding, session.StreamReaderBuffer);
    Parse(reader, file, module, session);
    reader.Dispose();
    input.Close();
}
void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream input;

    try {
        input = File.OpenRead (sourceFile.Name);
    } catch {
        Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    using (input) {
        SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding);
        var file = new CompilationSourceFile (module, sourceFile);

        if (sourceFile.FileType == SourceFileType.CSharp) {
            Tokenizer lexer = new Tokenizer (reader, file, session);
            int token, tokens = 0, errors = 0;
            while ((token = lexer.token ()) != Token.EOF) {
                tokens++;
                if (token == Token.ERROR)
                    errors++;
            }
        } else {
            Mono.PlayScript.Tokenizer lexer = new Mono.PlayScript.Tokenizer (reader, file, session);
            lexer.ParsingPlayScript = sourceFile.PsExtended;
            int token, tokens = 0, errors = 0;
            while ((token = lexer.token ()) != Mono.PlayScript.Token.EOF) {
                tokens++;
                if (token == Mono.PlayScript.Token.ERROR)
                    errors++;
            }
        }
    }
}
private void TokenizeFile(SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream input = null;
    SeekableStreamReader reader = null;

    try {
        if (sourceFile.GetInputStream != null) {
            reader = sourceFile.GetInputStream(sourceFile);
            if (reader == null) {
                throw new FileNotFoundException("Delegate returned null", sourceFile.Name);
            }
        } else {
            input = File.OpenRead(sourceFile.Name);
        }
    } catch {
        Report.Error(2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    if (reader == null) {
        using (input) {
            reader = new SeekableStreamReader(input, _ctx.Settings.Encoding);
            DoTokenize(sourceFile, module, session, reader);
        }
    } else {
        DoTokenize(sourceFile, module, session, reader);
    }
}
public void Parse(SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    Stream input;

    try {
        input = file.GetDataStream();
    } catch {
        report.Error(2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    // Check 'MZ' header
    if (input.ReadByte() == 77 && input.ReadByte() == 90) {
        report.Error(2015, "Source file `{0}' is a binary file and not a text file", file.Name);
        input.Close();
        return;
    }

    input.Position = 0;
    SeekableStreamReader reader = new SeekableStreamReader(input, ctx.Settings.Encoding, session.StreamReaderBuffer);

    Parse(reader, file, module, session, report);

    if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum) {
        input.Position = 0;
        var checksum = session.GetChecksumAlgorithm();
        file.SetChecksum(checksum.ComputeHash(input));
    }

    reader.Dispose();
    input.Close();
}
public void Parse(ModuleContainer module)
{
    bool tokenize_only = module.Compiler.Settings.TokenizeOnly;
    var sources = module.Compiler.SourceFiles;

    Location.Initialize(sources);

    var session = new ParserSession {
        UseJayGlobalArrays = true,
        LocatedTokens = new LocatedToken[15000]
    };

    for (int i = 0; i < sources.Count; ++i) {
        if (tokenize_only) {
            tokenize_file(sources[i], module, session);
        } else {
            Parse(sources[i], module, session, Report);
        }
    }
}
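Taken together with the tokenize_file and Parse(SourceFile, ...) overloads in this listing, the entry point above can be driven from a small harness. The sketch below is illustrative only: the Driver constructor, the CompilerSettings members, and ConsoleReportPrinter are assumed from the other samples here and differ between the Mono compiler forks shown.

// Hedged sketch: build a minimal compiler context and run the driver's Parse entry point.
public static void ParseFiles(string[] paths, bool tokenizeOnly)
{
    var settings = new CompilerSettings { TokenizeOnly = tokenizeOnly };   // assumed property, mirrored from module.Compiler.Settings.TokenizeOnly above

    for (int i = 0; i < paths.Length; ++i) {
        settings.SourceFiles.Add(new SourceFile(Path.GetFileName(paths[i]), paths[i], i + 1));
    }

    var ctx = new CompilerContext(settings, new ConsoleReportPrinter());
    var module = new ModuleContainer(ctx);

    // Driver(ctx) and Parse(ModuleContainer) mirror the methods shown in this listing.
    new Driver(ctx).Parse(module);
}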
// Mimicked from https://github.com/kkdevs/Patchwork/blob/master/Patchwork/MonoScript.cs#L124
public static Assembly Compile(Dictionary<string, byte[]> sources, TextWriter logger = null)
{
    ReportPrinter reporter = logger == null ? new ConsoleReportPrinter() : new StreamReportPrinter(logger);

    Location.Reset();

    var dllName = $"compiled_{DateTime.Now.Ticks}";
    compiledAssemblies.Add(dllName);

    var ctx = CreateContext(reporter);
    ctx.Settings.SourceFiles.Clear();

    var i = 0;

    SeekableStreamReader GetFile(SourceFile file)
    {
        return new SeekableStreamReader(new MemoryStream(sources[file.OriginalFullPathName]), Encoding.UTF8);
    }

    foreach (var source in sources) {
        ctx.Settings.SourceFiles.Add(new SourceFile(Path.GetFileName(source.Key), source.Key, i, GetFile));
        i++;
    }

    var container = new ModuleContainer(ctx);
    RootContext.ToplevelTypes = container;
    Location.Initialize(ctx.Settings.SourceFiles);

    var session = new ParserSession { UseJayGlobalArrays = true, LocatedTokens = new LocatedToken[15000] };

    container.EnableRedefinition();

    foreach (var sourceFile in ctx.Settings.SourceFiles) {
        var stream = sourceFile.GetInputStream(sourceFile);
        var source = new CompilationSourceFile(container, sourceFile);
        source.EnableRedefinition();
        container.AddTypeContainer(source);

        var parser = new CSharpParser(stream, source, session);
        parser.parse();
    }

    var ass = new AssemblyDefinitionDynamic(container, dllName, $"{dllName}.dll");
    container.SetDeclaringAssembly(ass);

    var importer = new ReflectionImporter(container, ctx.BuiltinTypes);
    ass.Importer = importer;

    var loader = new DynamicLoader(importer, ctx);

    ImportAppdomainAssemblies(a => importer.ImportAssembly(a, container.GlobalRootNamespace));

    loader.LoadReferences(container);
    ass.Create(AppDomain.CurrentDomain, AssemblyBuilderAccess.RunAndSave);
    container.CreateContainer();
    loader.LoadModules(ass, container.GlobalRootNamespace);
    container.InitializePredefinedTypes();
    container.Define();

    if (ctx.Report.Errors > 0) {
        logger?.WriteLine("Found errors! Aborting compilation...");
        return null;
    }

    try {
        ass.Resolve();
        ass.Emit();
        container.CloseContainer();
        ass.EmbedResources();
    } catch (Exception e) {
        logger?.WriteLine($"Failed to compile because {e}");
        return null;
    }

    return ass.Builder;
}
public void Parse (SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    Stream input;

    try {
        input = File.OpenRead (file.Name);
    } catch {
        report.Error (2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    // Check 'MZ' header
    if (input.ReadByte () == 77 && input.ReadByte () == 90) {
        report.Error (2015, "Source file `{0}' is a binary file and not a text file", file.Name);
        input.Close ();
        return;
    }

    input.Position = 0;
    SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding, session.StreamReaderBuffer);

    Parse (reader, file, module, session, report);

    if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum) {
        input.Position = 0;
        var checksum = session.GetChecksumAlgorithm ();
        file.SetChecksum (checksum.ComputeHash (input));
    }

    reader.Dispose ();
    input.Close ();
}
public static object Parse (SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report, int lineModifier = 0, int colModifier = 0)
{
    var file = new CompilationSourceFile (module, sourceFile);
    module.AddTypeContainer(file);

    object parser = null;
    if (sourceFile.FileType == SourceFileType.CSharp) {
        CSharpParser csParser = new CSharpParser (reader, file, report, session);
        csParser.Lexer.Line += lineModifier;
        csParser.Lexer.Column += colModifier;
        csParser.Lexer.sbag = new SpecialsBag ();
        csParser.parse ();
        parser = csParser;
    } else {
        PlayScriptParser psParser = new PlayScriptParser (reader, file, report, session);
        psParser.parsing_playscript = sourceFile.PsExtended;
        psParser.Lexer.Line += lineModifier;
        psParser.Lexer.Column += colModifier;
        psParser.Lexer.sbag = new SpecialsBag ();
        psParser.parse ();
        parser = psParser;
    }

    return parser;
}
public static void GenerateDynamicPartialClasses(ModuleContainer module, ParserSession session, Report report)
{
    List<Class> classes = new List<Class>();
    FindDynamicClasses(module, classes);
    if (classes.Count == 0) {
        return;
    }

    var os = new StringWriter();
    os.Write(@"
// Generated dynamic class partial classes
");

    foreach (var cl in classes) {
        os.Write(@"
namespace {1}
{{
    partial class {2} : PlayScript.IDynamicClass
    {{
        private PlayScript.IDynamicClass __dynamicProps;

        dynamic PlayScript.IDynamicClass.__GetDynamicValue(string name)
        {{
            object value = null;
            if (__dynamicProps != null) {{
                value = __dynamicProps.__GetDynamicValue(name);
            }}
            return value;
        }}

        bool PlayScript.IDynamicClass.__TryGetDynamicValue(string name, out object value)
        {{
            if (__dynamicProps != null) {{
                return __dynamicProps.__TryGetDynamicValue(name, out value);
            }} else {{
                value = PlayScript.Undefined._undefined;
                return false;
            }}
        }}

        void PlayScript.IDynamicClass.__SetDynamicValue(string name, object value)
        {{
            if (__dynamicProps == null) {{
                __dynamicProps = new PlayScript.DynamicProperties(this);
            }}
            __dynamicProps.__SetDynamicValue(name, value);
        }}

        bool PlayScript.IDynamicClass.__DeleteDynamicValue(object name)
        {{
            if (__dynamicProps != null) {{
                return __dynamicProps.__DeleteDynamicValue(name);
            }}
            return false;
        }}

        bool PlayScript.IDynamicClass.__HasDynamicValue(string name)
        {{
            if (__dynamicProps != null) {{
                return __dynamicProps.__HasDynamicValue(name);
            }}
            return false;
        }}

        System.Collections.IEnumerable PlayScript.IDynamicClass.__GetDynamicNames()
        {{
            if (__dynamicProps != null) {{
                return __dynamicProps.__GetDynamicNames();
            }}
            return null;
        }}
    }}
}}
", PsConsts.PsRootNamespace, ((ITypeDefinition)cl).Namespace, cl.MemberName.Basename);
    }

    string fileStr = os.ToString();
    var path = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.IO.Path.GetFullPath(module.Compiler.Settings.OutputFile)), "dynamic.g.cs");
    System.IO.File.WriteAllText(path, fileStr);

    byte[] byteArray = Encoding.ASCII.GetBytes(fileStr);
    var input = new MemoryStream(byteArray, false);
    var reader = new SeekableStreamReader(input, System.Text.Encoding.UTF8);

    SourceFile file = new SourceFile(path, path, 0);
    file.FileType = SourceFileType.CSharp;
    Driver.Parse(reader, file, module, session, report);
}
void Parse (ModuleContainer module)
{
    bool tokenize_only = module.Compiler.Settings.TokenizeOnly;
    var sources = module.Compiler.SourceFiles;

    Location.Initialize (sources);

    var session = new ParserSession () {
        UseJayGlobalArrays = true,
        LocatedTokens = new Tokenizer.LocatedToken[15000],
        AsLocatedTokens = new Mono.PlayScript.Tokenizer.LocatedToken[15000]
    };

    for (int i = 0; i < sources.Count; ++i) {
        if (tokenize_only) {
            tokenize_file (sources[i], module, session);
        } else {
            Parse (sources[i], module, session, Report);
        }
    }
}
public static void Parse(SeekableStreamReader reader, SourceFile source, ModuleContainer module, ParserSession session, Report report)
{
    // Create the compilation source
    CompilationSourceFile file = new CompilationSourceFile(module, source);

    // Add to module
    module.AddTypeContainer(file);

    // Create the parser and run
    CSharpParser parser = new CSharpParser(reader, file, report, session);
    parser.parse();
}
void ParseStartupFiles ()
{
    Driver d = new Driver (ctx);

    Location.Initialize (ctx.SourceFiles);

    var parser_session = new ParserSession ();
    for (int i = 0; i < startup_files; ++i) {
        var sf = ctx.SourceFiles[i];
        d.Parse (sf, module, parser_session, ctx.Report);
    }
}
void ParseParallel (ModuleContainer module)
{
    var sources = module.Compiler.SourceFiles;

    Location.Initialize (sources);

    var pcount = Environment.ProcessorCount;
    var threads = new Thread[System.Math.Max (2, pcount - 1)];

    for (int i = 0; i < threads.Length; ++i) {
        var t = new Thread (l => {
            // Each worker gets its own session and report; neither is shared across threads
            var session = new ParserSession () {
                //UseJayGlobalArrays = true,
            };

            var report = new Report (ctx, Report.Printer); // TODO: Implement flush at once printer

            // Round-robin partition: thread l parses sources l, l + N, l + 2N, ...
            for (int ii = (int) l; ii < sources.Count; ii += threads.Length) {
                Parse (sources[ii], module, session, report);
            }

            // TODO: Merge warning regions
        });

        t.Start (i);
        threads[i] = t;
    }

    for (int t = 0; t < threads.Length; ++t) {
        threads[t].Join ();
    }
}
SyntaxTree Parse(ITextSource program, string fileName, int initialLine, int initialColumn)
{
    lock (parseLock) {
        errorReportPrinter = new ErrorReportPrinter ("");
        var ctx = new CompilerContext (compilerSettings.ToMono(), errorReportPrinter);
        ctx.Settings.TabSize = 1;

        var reader = new SeekableStreamReader (program);
        var file = new SourceFile (fileName, fileName, 0);
        Location.Initialize (new List<SourceFile> (new [] { file }));

        var module = new ModuleContainer (ctx);
        var session = new ParserSession ();
        session.LocationsBag = new LocationsBag ();
        var report = new Report (ctx, errorReportPrinter);

        CompilerCompilationUnit top;
        if (String.IsNullOrEmpty(fileName) || fileName.EndsWith(".play") || fileName.EndsWith(".as")) {
            if (String.IsNullOrEmpty(fileName) || fileName.EndsWith(".play"))
                file.PsExtended = true;   // Assume playscript unless we have an actual file ext.

            var parser = (Mono.PlayScript.PlayScriptParser)Driver.Parse(reader, file, module, session, report, initialLine - 1, initialColumn - 1);
            top = new CompilerCompilationUnit() {
                ModuleCompiled = module,
                LocationsBag = session.LocationsBag,
                SpecialsBag = parser.Lexer.sbag,
                Conditionals = parser.Lexer.SourceFile.Conditionals
            };
        } else {
            var parser = (Mono.CSharpPs.CSharpParser)Driver.Parse(reader, file, module, session, report, initialLine - 1, initialColumn - 1);
            top = new CompilerCompilationUnit() {
                ModuleCompiled = module,
                LocationsBag = session.LocationsBag,
                SpecialsBag = parser.Lexer.sbag,
                Conditionals = parser.Lexer.SourceFile.Conditionals
            };
        }

        var unit = Parse (top, fileName);
        unit.Errors.AddRange (errorReportPrinter.Errors);
        CompilerCallableEntryPoint.Reset ();
        return unit;
    }
}
public Assembly DoStaticCompile(IEnumerable<object> sources, string prefix = "compiled_")
{
    reporter.Reset();
    Location.Reset();

    var ctx = BuildContext(reporter);
    ctx.Settings.SourceFiles.Clear();

    int i = 0;
    var allBytes = new MemoryStream();
    List<Assembly> imports = new List<Assembly>();

    foreach (var fo in sources) {
        Assembly impass = fo as Assembly;
        if (impass != null) {
            imports.Add(impass);
            continue;
        }

        var f = fo as string;
        byte[] fbuf = fo as byte[];

        if (f != null) {
            if (!f.EndsWith(".cs")) {
                continue;
            }
            var bname = (f + "\n").ToBytes();
            allBytes.Write(bname, 0, bname.Length);
            fbuf = File.ReadAllBytes(f);
            allBytes.Write(fbuf, 0, fbuf.Length);
        } else {
            allBytes.Write(fbuf, 0, fbuf.Length);
            f = null;
        }

        i++;
        ctx.Settings.SourceFiles.Add(new SourceFile(
            f == null ? "<eval>" : Path.GetFileName(f),
            f ?? "<eval>",
            i,
            (o) => new SeekableStreamReader(new MemoryStream(fbuf), Encoding.UTF8)));
    }

    string dllname = prefix + (counter++) + ".dll";
    if (tempdir != null) {
        if (hashkey != null) {
            var hb = hashkey.ToBytes();
            allBytes.Write(hb, 0, hb.Length);
        }

        var hash = prefix + Ext.HashToString(allBytes.ToArray()).Substring(0, 12).ToLower() + ".dll";
        if (hashkey == null) {
            hashkey = hash;
        }

        dllname = Path.Combine(tempdir, hash);
        if (File.Exists(dllname)) {
            var nam = AssemblyName.GetAssemblyName(dllname);
            unloaded.Remove(nam.Name.ToLower());
            return Assembly.Load(nam);
        }
    }

    var mod = new ModuleContainer(ctx);
    RootContext.ToplevelTypes = mod;
    Location.Initialize(ctx.Settings.SourceFiles);

    var session = new ParserSession() { UseJayGlobalArrays = true, LocatedTokens = new LocatedToken[15000] };

    mod.EnableRedefinition();

    foreach (var finfo in ctx.Settings.SourceFiles) {
        var fs = finfo.GetInputStream(finfo);
        var csrc = new CompilationSourceFile(mod, finfo);
        csrc.EnableRedefinition();
        mod.AddTypeContainer(csrc);

        var parser = new CSharpParser(fs, csrc, session);
        parser.parse();
    }

    Debug.Log("Defining new assembly " + dllname);
    var ass = new AssemblyDefinitionDynamic(mod, Path.GetFileNameWithoutExtension(dllname), dllname);
    mod.SetDeclaringAssembly(ass);

    var importer = new ReflectionImporter(mod, ctx.BuiltinTypes);
    ass.Importer = importer;

    var loader = new DynamicLoader(importer, ctx);
    ImportAssemblies((a) => importer.ImportAssembly(a, mod.GlobalRootNamespace), prefix);
    foreach (var impa in imports) {
        importer.ImportAssembly(impa, mod.GlobalRootNamespace);
    }

    loader.LoadReferences(mod);
    ass.Create(AppDomain.CurrentDomain, AssemblyBuilderAccess.RunAndSave);
    mod.CreateContainer();
    loader.LoadModules(ass, mod.GlobalRootNamespace);
    mod.InitializePredefinedTypes();
    mod.Define();

    if (ctx.Report.Errors > 0) {
        tw.WriteLine($"{ctx.Report.Errors} errors, aborting.");
        return null;
    }

    try {
        ass.Resolve();
        ass.Emit();
        mod.CloseContainer();
        ass.EmbedResources();
    } catch (Exception ex) {
        tw.WriteLine("Link error: " + ex);
        return null;
    }

    if (tempdir != null) {
        ass.Save();
    }

    return ass.Builder;
}
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, ParserSession session)
    : this (reader, file, file.Compiler.Report, session)
{
}
/*
/// <summary>
/// Parses a file snippet; guessing what the code snippet represents (whole file, type members, block, type reference, expression).
/// </summary>
public AstNode ParseSnippet (string code)
{
    // TODO: add support for parsing a part of a file
    throw new NotImplementedException ();
}
*/

public DocumentationReference ParseDocumentationReference (string cref)
{
    // see Mono.CSharpPs.DocumentationBuilder.HandleXrefCommon
    if (cref == null)
        throw new ArgumentNullException ("cref");

    // Additional symbols for < and > are allowed for easier XML typing
    cref = cref.Replace ('{', '<').Replace ('}', '>');

    lock (parseLock) {
        errorReportPrinter = new ErrorReportPrinter("");
        var ctx = new CompilerContext(compilerSettings.ToMono(), errorReportPrinter);
        ctx.Settings.TabSize = 1;

        var reader = new SeekableStreamReader(new StringTextSource (cref));
        var file = new SourceFile("", "", 0);
        Location.Initialize(new List<SourceFile> (new [] { file }));

        var module = new ModuleContainer(ctx);
        module.DocumentationBuilder = new DocumentationBuilder(module);

        var source_file = new CompilationSourceFile (module);
        var report = new Report (ctx, errorReportPrinter);

        ParserSession session = new ParserSession ();
        session.LocationsBag = new LocationsBag ();

        var parser = new Mono.PlayScript.PlayScriptParser (reader, source_file, report, session);
        parser.parsing_playscript = (source_file.SourceFile != null) ? source_file.SourceFile.PsExtended : true;
        parser.Lexer.Line += initialLocation.Line - 1;
        parser.Lexer.Column += initialLocation.Column - 1;
        parser.Lexer.putback_char = Mono.PlayScript.Tokenizer.DocumentationXref;
        parser.Lexer.parsing_generic_declaration_doc = true;
        parser.parse ();

        if (report.Errors > 0) {
            // Report.Warning (1584, 1, mc.Location, "XML comment on `{0}' has syntactically incorrect cref attribute `{1}'",
            //     mc.GetSignatureForError (), cref);
        }

        ConversionVisitor conversionVisitor = new ConversionVisitor (false, session.LocationsBag);
        DocumentationReference docRef = conversionVisitor.ConvertXmlDoc(module.DocumentationBuilder);
        CompilerCallableEntryPoint.Reset();
        return docRef;
    }
}
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, Report report, ParserSession session)
{
    this.file = file;
    current_container = current_namespace = file;
    this.module = file.Module;
    this.compiler = file.Compiler;
    this.settings = compiler.Settings;
    this.report = report;

    lang_version = settings.Version;
    yacc_verbose_flag = settings.VerboseParserFlag;
    doc_support = settings.DocumentationFile != null;
    lexer = new Tokenizer (reader, file, session);
    oob_stack = new Stack<object> ();
    lbag = session.LocationsBag;
    use_global_stacks = session.UseJayGlobalArrays;
    parameters_bucket = session.ParametersStack;
}
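A minimal, hedged sketch of wiring this constructor up against an in-memory source, following the pattern of the static Parse helpers and the Compile/DoStaticCompile examples above; the CompilerSettings defaults and ConsoleReportPrinter choice are assumptions and vary between the compiler forks represented in this listing.

// Hedged sketch: parse a source string into a CompilationSourceFile using the constructor above.
static CompilationSourceFile ParseString(string code, string fileName)
{
    var settings = new CompilerSettings();
    var ctx = new CompilerContext(settings, new ConsoleReportPrinter());
    var module = new ModuleContainer(ctx);

    var sourceFile = new SourceFile(fileName, fileName, 1);
    settings.SourceFiles.Add(sourceFile);
    Location.Initialize(settings.SourceFiles);

    var file = new CompilationSourceFile(module, sourceFile);
    module.AddTypeContainer(file);

    var reader = new SeekableStreamReader(new MemoryStream(Encoding.UTF8.GetBytes(code)), Encoding.UTF8);
    var session = new ParserSession { UseJayGlobalArrays = true };

    new CSharpParser(reader, file, ctx.Report, session).parse();
    return file;   // parsed top-level container; errors are surfaced through ctx.Report
}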