// Tokenize-only mode: lex the whole file and print token/error totals.
// Emits CS2001 and returns when the file cannot be opened.
void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
	Stream input;
	try {
		input = File.OpenRead (sourceFile.Name);
	} catch {
		Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
		return;
	}

	using (input)
	// Fix: dispose the reader as well so its buffers are released
	// (the sibling Parse path calls reader.Dispose () explicitly).
	using (SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding)) {
		var file = new CompilationSourceFile (module, sourceFile);

		Tokenizer lexer = new Tokenizer (reader, file, session, ctx.Report);
		int token, tokens = 0, errors = 0;
		while ((token = lexer.token ()) != Token.EOF) {
			tokens++;
			if (token == Token.ERROR)
				errors++;
		}

		Console.WriteLine ("Tokenized: " + tokens + " found " + errors + " errors");
	}
}
// Drives parsing (or tokenize-only lexing) of every source file registered
// on the module's compiler.
void Parse(ModuleContainer module, bool returnAtSignInVerbatimIdentifiers)
{
    var compiler = module.Compiler;
    var sources = compiler.SourceFiles;
    bool lexOnly = compiler.Settings.TokenizeOnly;

    Location.Initialize(sources);

    var session = new ParserSession {
        UseJayGlobalArrays = true,
        LocatedTokens = new LocatedToken[15000],
        LocationsBag = new LocationsBag()
    };

    foreach (var source in sources) {
        if (lexOnly)
            tokenize_file(source, module, session, returnAtSignInVerbatimIdentifiers);
        else
            Parse(source, module, session, Report, returnAtSignInVerbatimIdentifiers);
    }
}
// Opens, sniffs and parses a single source file.
// Reports CS2001 when the file is missing and CS2015 when it looks like a
// binary (PE) file. When debug info is requested and parsing succeeded,
// also records the file checksum used by the symbol writer.
public void Parse(SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    Stream input;
    try {
        input = File.OpenRead(file.Name);
    } catch {
        report.Error(2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    // Fix: `using' guarantees the stream and reader are released even when
    // the parser throws; the original leaked both on the exception path.
    using (input) {
        // Check 'MZ' header: reject PE binaries passed as source.
        if (input.ReadByte() == 77 && input.ReadByte() == 90) {
            report.Error(2015, "Source file `{0}' is a binary file and not a text file", file.Name);
            return;
        }

        input.Position = 0;

        using (var reader = new SeekableStreamReader(input, ctx.Settings.Encoding, session.StreamReaderBuffer)) {
            Parse(reader, file, module, session, report);

            if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum) {
                input.Position = 0;
                var checksum = session.GetChecksumAlgorithm();
                file.SetChecksum(checksum.ComputeHash(input));
            }
        }
    }
}
// Tokenize-only mode: lex the whole file and print token/error totals.
// `returnAtSignInVerbatimIdentifiers' is forwarded to the tokenizer to
// control how the @ of verbatim identifiers is surfaced.
void tokenize_file(SourceFile sourceFile, ModuleContainer module, ParserSession session, bool returnAtSignInVerbatimIdentifiers)
{
    Stream input;
    try {
        input = File.OpenRead(sourceFile.Name);
    } catch {
        Report.Error(2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    using (input)
    // Fix: dispose the reader as well so its buffers are released.
    using (SeekableStreamReader reader = new SeekableStreamReader(input, ctx.Settings.Encoding)) {
        var file = new CompilationSourceFile(module, sourceFile);

        Tokenizer lexer = new Tokenizer(reader, file, session, ctx.Report, returnAtSignInVerbatimIdentifiers);
        int token, tokens = 0, errors = 0;
        while ((token = lexer.token()) != Token.EOF) {
            tokens++;
            if (token == Token.ERROR) {
                errors++;
            }
        }

        Console.WriteLine("Tokenized: " + tokens + " found " + errors + " errors");
    }
}
// Creates the compilation unit for `sourceFile', registers it with the
// module and runs the C# parser over `reader'.
public static void Parse(SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report)
{
    var unit = new CompilationSourceFile(module, sourceFile);
    module.AddTypeContainer(unit);

    new CSharpParser(reader, unit, report, session).parse();
}
// Runs the PlayScript post-parse code generators, stopping at the first
// stage that produced errors.
public static void GenerateCode (ModuleContainer module, ParserSession session, Report report)
{
	GenerateDynamicPartialClasses (module, session, report);
	if (report.Errors != 0)
		return;

	GenerateEmbedClasses (module, session, report);
}
// Parses an already-opened reader and, when debug info is enabled and the
// parse succeeded, stores the file checksum for the symbol writer.
void DoParse(SourceFile file, ModuleContainer module, ParserSession session, Report report, SeekableStreamReader reader)
{
    Parse(reader, file, module, session, report);

    bool wantChecksum = ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum;
    if (!wantChecksum)
        return;

    // Re-read the stream from the start to hash the raw bytes.
    reader.Stream.Position = 0;
    file.SetChecksum(session.GetChecksumAlgorithm().ComputeHash(reader.Stream));
}
// Parses the startup files registered on the compiler context (the first
// `startup_files' entries of ctx.SourceFiles).
void ParseStartupFiles ()
{
	var driver = new Driver (ctx);
	Location.Initialize (ctx.SourceFiles);

	var session = new ParserSession ();
	for (int i = 0; i < startup_files; ++i)
		driver.Parse (ctx.SourceFiles [i], module, session, ctx.Report);
}
// Tokenize-only mode for the PlayScript-aware driver: lexes the file with
// the C# or PlayScript tokenizer depending on the source file type.
// Note: unlike the C#-only driver this variant computes but does not print
// the token/error totals.
void tokenize_file(SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream input;
    try {
        input = File.OpenRead(sourceFile.Name);
    } catch {
        Report.Error(2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    using (input)
    // Fix: dispose the reader too so its internal buffers are released.
    using (SeekableStreamReader reader = new SeekableStreamReader(input, ctx.Settings.Encoding)) {
        var file = new CompilationSourceFile(module, sourceFile);
        if (sourceFile.FileType == SourceFileType.CSharp) {
            Tokenizer lexer = new Tokenizer(reader, file, session);
            int token, tokens = 0, errors = 0;
            while ((token = lexer.token()) != Token.EOF) {
                tokens++;
                if (token == Token.ERROR)
                    errors++;
            }
        } else {
            Mono.PlayScript.Tokenizer lexer = new Mono.PlayScript.Tokenizer(reader, file, session);
            lexer.ParsingPlayScript = sourceFile.PsExtended;
            int token, tokens = 0, errors = 0;
            while ((token = lexer.token()) != Mono.PlayScript.Token.EOF) {
                tokens++;
                if (token == Mono.PlayScript.Token.ERROR)
                    errors++;
            }
        }
    }
}
// Lexes an already-opened reader, counting total tokens and Token.ERROR
// occurrences, then prints the totals to stdout.
void DoTokenize(SourceFile sourceFile, ModuleContainer module, ParserSession session, SeekableStreamReader reader)
{
    var file = new CompilationSourceFile(module, sourceFile);
    var lexer = new Tokenizer(reader, file, session, ctx.Report);

    int tokens = 0, errors = 0;
    for (int token = lexer.token(); token != Token.EOF; token = lexer.token()) {
        tokens++;
        if (token == Token.ERROR)
            errors++;
    }

    Console.WriteLine("Tokenized: " + tokens + " found " + errors + " errors");
}
// Parses one source file. Input can come from a caller-supplied reader
// delegate (file.GetInputStream) or directly from disk; disk input is
// additionally sniffed for a PE ('MZ') header so binaries are rejected.
public void Parse(SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    SeekableStreamReader reader = null;
    Stream input = null;

    try {
        if (file.GetInputStream == null) {
            input = File.OpenRead(file.Name);
        } else {
            reader = file.GetInputStream(file);
            if (reader == null)
                throw new FileNotFoundException("Delegate returned null", file.Name);
        }
    } catch {
        report.Error(2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    if (reader != null) {
        // NOTE(review): a delegate-supplied reader is not disposed here;
        // ownership appears to stay with the caller — confirm.
        DoParse(file, module, session, report, reader);
        return;
    }

    using (input) {
        // Check 'MZ' header: reject PE binaries passed as source.
        if (input.ReadByte() == 77 && input.ReadByte() == 90) {
            report.Error(2015, "Source file `{0}' is a binary file and not a text file", file.Name);
            return;
        }

        input.Position = 0;
        reader = new SeekableStreamReader(input, ctx.Settings.Encoding, session.StreamReaderBuffer);
        DoParse(file, module, session, report, reader);
    }
}
// Parses every source file of the module, or only lexes them when the
// tokenize-only compiler option is set.
void Parse (ModuleContainer module)
{
	var sources = module.Compiler.SourceFiles;
	Location.Initialize (sources);

	var session = new ParserSession {
		UseJayGlobalArrays = true,
		LocatedTokens = new LocatedToken[15000]
	};

	bool lex_only = module.Compiler.Settings.TokenizeOnly;
	foreach (var source in sources) {
		if (lex_only)
			tokenize_file (source, module, session);
		else
			Parse (source, module, session, Report);
	}
}
// Tokenize-only mode: lexes the file with the tokenizer matching its file
// type (C# or PlayScript). Token/error counts are computed but, unlike
// the C#-only driver, not reported.
void tokenize_file (SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
	Stream input;
	try {
		input = File.OpenRead (sourceFile.Name);
	} catch {
		Report.Error (2001, "Source file `" + sourceFile.Name + "' could not be found");
		return;
	}

	using (input)
	// Fix: dispose the reader too, releasing its internal buffers.
	using (SeekableStreamReader reader = new SeekableStreamReader (input, ctx.Settings.Encoding)) {
		var file = new CompilationSourceFile (module, sourceFile);
		if (sourceFile.FileType == SourceFileType.CSharp) {
			Tokenizer lexer = new Tokenizer (reader, file, session);
			int token, tokens = 0, errors = 0;
			while ((token = lexer.token ()) != Token.EOF) {
				tokens++;
				if (token == Token.ERROR)
					errors++;
			}
		} else {
			Mono.PlayScript.Tokenizer lexer = new Mono.PlayScript.Tokenizer (reader, file, session);
			lexer.ParsingPlayScript = sourceFile.PsExtended;
			int token, tokens = 0, errors = 0;
			while ((token = lexer.token ()) != Mono.PlayScript.Token.EOF) {
				tokens++;
				if (token == Mono.PlayScript.Token.ERROR)
					errors++;
			}
		}
	}
}
// Parses (or tokenizes) all sources; when any PlayScript file was seen,
// runs the PlayScript code generator afterwards to inject generated code.
void Parse(ModuleContainer module)
{
    var sources = module.Compiler.SourceFiles;
    Location.Initialize(sources);

    var session = new ParserSession() {
        UseJayGlobalArrays = true,
        LocatedTokens = new Tokenizer.LocatedToken[15000],
        AsLocatedTokens = new Mono.PlayScript.Tokenizer.LocatedToken[15000]
    };

    bool lexOnly = module.Compiler.Settings.TokenizeOnly;
    bool anyPlayscript = false;

    foreach (var source in sources) {
        if (lexOnly)
            tokenize_file(source, module, session);
        else
            Parse(source, module, session, Report);

        anyPlayscript |= source.FileType == SourceFileType.PlayScript;
    }

    // PlayScript needs to add generated code after parsing.
    if (anyPlayscript)
        Mono.PlayScript.CodeGenerator.GenerateCode(module, session, Report);
}
// Parses the module's sources on several threads; thread k handles the
// source files whose index is congruent to k modulo the thread count.
void ParseParallel(ModuleContainer module)
{
    var sources = module.Compiler.SourceFiles;
    Location.Initialize(sources);

    int workerCount = System.Math.Max(2, Environment.ProcessorCount - 1);
    var workers = new Thread[workerCount];

    for (int i = 0; i < workerCount; ++i) {
        workers[i] = new Thread(arg => {
            // Each thread owns a private session and report; no shared state.
            var session = new ParserSession() {
                //UseJayGlobalArrays = true,
            };
            var report = new Report(ctx, Report.Printer); // TODO: Implement flush at once printer

            for (int idx = (int)arg; idx < sources.Count; idx += workerCount)
                Parse(sources[idx], module, session, report);

            // TODO: Merge warning regions
        });
        workers[i].Start(i);
    }

    foreach (var worker in workers)
        worker.Join();
}
// Entry point for parsing: only lexes when TokenizeOnly is set, otherwise
// fully parses every registered source file.
public void Parse(ModuleContainer module)
{
    bool tokenizeOnly = module.Compiler.Settings.TokenizeOnly;
    var sources = module.Compiler.SourceFiles;

    Location.Initialize(sources);

    var session = new ParserSession {
        UseJayGlobalArrays = true,
        LocatedTokens = new LocatedToken[15000]
    };

    for (int i = 0; i < sources.Count; ++i) {
        var source = sources[i];
        if (tokenizeOnly) {
            tokenize_file(source, module, session);
        } else {
            Parse(source, module, session, Report);
        }
    }
}
//
// Convenience constructor: reports through the file's own compiler Report.
//
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, ParserSession session)
	: this (reader, file, file.Compiler.Report, session)
{
}
//
// Processes "see" or "seealso" elements from cref attribute.
// The cref text is parsed with a throw-away C# parser and resolved against
// the member cache; the attribute is rewritten to the documentation ID of
// the match ("T:", "M:", ...) or to "!:" + cref when unresolved.
//
void HandleXrefCommon(MemberCore mc, XmlElement xref)
{
    string cref = xref.GetAttribute("cref");
    // when, XmlReader, "if (cref == null)"
    if (!xref.HasAttribute("cref")) {
        return;
    }

    // Nothing to be resolved the reference is marked explicitly
    if (cref.Length > 2 && cref[1] == ':') {
        return;
    }

    // Additional symbols for < and > are allowed for easier XML typing
    cref = cref.Replace('{', '<').Replace('}', '>');

    var encoding = module.Compiler.Settings.Encoding;
    var s = new MemoryStream(encoding.GetBytes(cref));

    var source_file = new CompilationSourceFile(doc_module, mc.Location.SourceFile);
    var report = new Report(doc_module.Compiler, new NullReportPrinter());

    if (session == null) {
        // Fix: the assignment statement was missing its terminating
        // semicolon inside the braces, which did not compile.
        session = new ParserSession {
            UseJayGlobalArrays = true
        };
    }

    SeekableStreamReader seekable = new SeekableStreamReader(s, encoding, session.StreamReaderBuffer);

    var parser = new CSharpParser(seekable, source_file, report, session);
    // Reset the out-fields the parser fills for documentation references.
    ParsedParameters = null;
    ParsedName = null;
    ParsedBuiltinType = null;
    ParsedOperator = null;
    // Switch the lexer into documentation-cref parsing mode.
    parser.Lexer.putback_char = Tokenizer.DocumentationXref;
    parser.Lexer.parsing_generic_declaration_doc = true;
    parser.parse();
    if (report.Errors > 0) {
        Report.Warning(1584, 1, mc.Location, "XML comment on `{0}' has syntactically incorrect cref attribute `{1}'", mc.GetSignatureForError(), cref);

        xref.SetAttribute("cref", "!:" + cref);
        return;
    }

    MemberSpec member;
    string prefix = null;
    FullNamedExpression fne = null;

    //
    // Try built-in type first because we are using ParsedName as identifier of
    // member names on built-in types
    //
    if (ParsedBuiltinType != null && (ParsedParameters == null || ParsedName != null)) {
        member = ParsedBuiltinType.Type;
    } else {
        member = null;
    }

    if (ParsedName != null || ParsedOperator.HasValue) {
        TypeSpec type = null;
        string member_name = null;

        if (member == null) {
            if (ParsedOperator.HasValue) {
                type = mc.CurrentType;
            } else if (ParsedName.Left != null) {
                fne = ResolveMemberName(mc, ParsedName.Left);
                if (fne != null) {
                    var ns = fne as NamespaceExpression;
                    if (ns != null) {
                        fne = ns.LookupTypeOrNamespace(mc, ParsedName.Name, ParsedName.Arity, LookupMode.Probing, Location.Null);
                        if (fne != null) {
                            member = fne.Type;
                        }
                    } else {
                        type = fne.Type;
                    }
                }
            } else {
                fne = ResolveMemberName(mc, ParsedName);
                if (fne == null) {
                    type = mc.CurrentType;
                } else if (ParsedParameters == null) {
                    member = fne.Type;
                } else if (fne.Type.MemberDefinition == mc.CurrentType.MemberDefinition) {
                    member_name = Constructor.ConstructorName;
                    type = fne.Type;
                }
            }
        } else {
            type = (TypeSpec)member;
            member = null;
        }

        if (ParsedParameters != null) {
            // Resolve parameter types silently; resolution errors only mean
            // "no match", they are not user diagnostics.
            var old_printer = mc.Module.Compiler.Report.SetPrinter(new NullReportPrinter());
            try {
                var context = new DocumentationMemberContext(mc, ParsedName ?? MemberName.Null);

                foreach (var pp in ParsedParameters) {
                    pp.Resolve(context);
                }
            } finally {
                mc.Module.Compiler.Report.SetPrinter(old_printer);
            }
        }

        if (type != null) {
            if (member_name == null) {
                member_name = ParsedOperator.HasValue ?
                    Operator.GetMetadataName(ParsedOperator.Value) : ParsedName.Name;
            }

            int parsed_param_count;
            if (ParsedOperator == Operator.OpType.Explicit || ParsedOperator == Operator.OpType.Implicit) {
                // Conversion operators: the last "parameter" is the return type.
                parsed_param_count = ParsedParameters.Count - 1;
            } else if (ParsedParameters != null) {
                parsed_param_count = ParsedParameters.Count;
            } else {
                parsed_param_count = 0;
            }

            int parameters_match = -1;
            do {
                var members = MemberCache.FindMembers(type, member_name, true);
                if (members != null) {
                    foreach (var m in members) {
                        if (ParsedName != null && m.Arity != ParsedName.Arity) {
                            continue;
                        }

                        if (ParsedParameters != null) {
                            IParametersMember pm = m as IParametersMember;
                            if (pm == null) {
                                continue;
                            }

                            if (m.Kind == MemberKind.Operator && !ParsedOperator.HasValue) {
                                continue;
                            }

                            var pm_params = pm.Parameters;

                            int i;
                            for (i = 0; i < parsed_param_count; ++i) {
                                var pparam = ParsedParameters[i];

                                if (i >= pm_params.Count || pparam == null || pparam.TypeSpec == null ||
                                    !TypeSpecComparer.Override.IsEqual(pparam.TypeSpec, pm_params.Types[i]) ||
                                    (pparam.Modifier & Parameter.Modifier.RefOutMask) != (pm_params.FixedParameters[i].ModFlags & Parameter.Modifier.RefOutMask)) {

                                    if (i > parameters_match) {
                                        parameters_match = i;
                                    }

                                    // Mismatch: remember how far we got and try the
                                    // next candidate.
                                    i = -1;
                                    break;
                                }
                            }

                            if (i < 0) {
                                continue;
                            }

                            if (ParsedOperator == Operator.OpType.Explicit || ParsedOperator == Operator.OpType.Implicit) {
                                if (pm.MemberType != ParsedParameters[parsed_param_count].TypeSpec) {
                                    parameters_match = parsed_param_count + 1;
                                    continue;
                                }
                            } else {
                                if (parsed_param_count != pm_params.Count) {
                                    continue;
                                }
                            }
                        }

                        if (member != null) {
                            Report.Warning(419, 3, mc.Location, "Ambiguous reference in cref attribute `{0}'. Assuming `{1}' but other overloads including `{2}' have also matched", cref, member.GetSignatureForError(), m.GetSignatureForError());
                            break;
                        }

                        member = m;
                    }
                }

                // Continue with parent type for nested types
                if (member == null) {
                    type = type.DeclaringType;
                } else {
                    type = null;
                }
            } while (type != null);

            if (member == null && parameters_match >= 0) {
                for (int i = parameters_match; i < parsed_param_count; ++i) {
                    Report.Warning(1580, 1, mc.Location, "Invalid type for parameter `{0}' in XML comment cref attribute `{1}'", (i + 1).ToString(), cref);
                }

                if (parameters_match == parsed_param_count + 1) {
                    Report.Warning(1581, 1, mc.Location, "Invalid return type in XML comment cref attribute `{0}'", cref);
                }
            }
        }
    }

    if (member == null) {
        Report.Warning(1574, 1, mc.Location, "XML comment on `{0}' has cref attribute `{1}' that could not be resolved", mc.GetSignatureForError(), cref);
        cref = "!:" + cref;
    } else if (member == InternalType.Namespace) {
        cref = "N:" + fne.GetSignatureForError();
    } else {
        prefix = GetMemberDocHead(member);
        cref = prefix + member.GetSignatureForDocumentation();
    }

    xref.SetAttribute("cref", cref);
}
// Multi-threaded parse: each worker thread takes every Nth source file,
// using its own ParserSession and Report instance.
void ParseParallel (ModuleContainer module)
{
	var sources = module.Compiler.SourceFiles;
	Location.Initialize (sources);

	var threads = new Thread[System.Math.Max (2, Environment.ProcessorCount - 1)];
	int stride = threads.Length;

	for (int i = 0; i < threads.Length; ++i) {
		var thread = new Thread (start => {
			var session = new ParserSession () {
				//UseJayGlobalArrays = true,
			};
			var report = new Report (ctx, Report.Printer); // TODO: Implement flush at once printer

			for (int index = (int) start; index < sources.Count; index += stride)
				Parse (sources[index], module, session, report);

			// TODO: Merge warning regions
		});

		thread.Start (i);
		threads[i] = thread;
	}

	foreach (var worker in threads)
		worker.Join ();
}
// Parses `reader' into a fresh compilation unit and returns the parser so
// callers can inspect its lexer state; the line/column modifiers let
// embedded snippets report locations relative to their host document.
public static CSharpParser Parse(SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report, int lineModifier = 0, int colModifier = 0)
{
    var unit = new CompilationSourceFile(module, sourceFile);
    module.AddTypeContainer(unit);

    var parser = new CSharpParser(reader, unit, report, session);

    var lexer = parser.Lexer;
    lexer.Line += lineModifier;
    lexer.Column += colModifier;
    lexer.sbag = new SpecialsBag();

    parser.parse();
    return parser;
}
//
// Convenience constructor: reports through the file's own compiler Report.
//
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, ParserSession session, bool returnAtSignInVerbatimIdentifiers)
	: this (reader, file, file.Compiler.Report, session, returnAtSignInVerbatimIdentifiers)
{
}
// Opens, sniffs and parses a single source file. Reports CS2001 when the
// file cannot be opened and CS2015 when it starts with a PE 'MZ' header.
// Records the file checksum when debug info generation is enabled.
public void Parse (SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
	Stream input;
	try {
		input = File.OpenRead (file.Name);
	} catch {
		report.Error (2001, "Source file `{0}' could not be found", file.Name);
		return;
	}

	// Fix: `using' guarantees stream and reader are released even when the
	// parser throws; the original leaked both on that path.
	using (input) {
		// Check 'MZ' header
		if (input.ReadByte () == 77 && input.ReadByte () == 90) {
			report.Error (2015, "Source file `{0}' is a binary file and not a text file", file.Name);
			return;
		}

		input.Position = 0;

		using (var reader = new SeekableStreamReader (input, ctx.Settings.Encoding, session.StreamReaderBuffer)) {
			Parse (reader, file, module, session, report);

			if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum) {
				input.Position = 0;
				var checksum = session.GetChecksumAlgorithm ();
				file.SetChecksum (checksum.ComputeHash (input));
			}
		}
	}
}
// Generates a partial-class source file ("dynamic.g.cs") implementing
// PlayScript.IDynamicClass for every dynamic class found in the module,
// writes it next to the output assembly and parses it into the module.
public static void GenerateDynamicPartialClasses(ModuleContainer module, ParserSession session, Report report)
{
    List<Class> classes = new List<Class>();
    FindDynamicClasses(module, classes);
    if (classes.Count == 0)
        return;

    var os = new StringWriter();
    os.Write (@" // Generated dynamic class partial classes ");
    foreach (var cl in classes) {
        // NOTE(review): format placeholder {0} (PsRootNamespace) is unused
        // by this template — confirm that is intentional.
        os.Write (@" namespace {1} {{ partial class {2} : PlayScript.IDynamicClass {{ private PlayScript.IDynamicClass __dynamicProps; dynamic PlayScript.IDynamicClass.__GetDynamicValue(string name) {{ object value = null; if (__dynamicProps != null) {{ value = __dynamicProps.__GetDynamicValue(name); }} return value; }} bool PlayScript.IDynamicClass.__TryGetDynamicValue(string name, out object value) {{ if (__dynamicProps != null) {{ return __dynamicProps.__TryGetDynamicValue(name, out value); }} else {{ value = null; return false; }} }} void PlayScript.IDynamicClass.__SetDynamicValue(string name, object value) {{ if (__dynamicProps == null) {{ __dynamicProps = new PlayScript.DynamicProperties(this); }} __dynamicProps.__SetDynamicValue(name, value); }} bool PlayScript.IDynamicClass.__DeleteDynamicValue(object name) {{ if (__dynamicProps != null) {{ return __dynamicProps.__DeleteDynamicValue(name); }} return false; }} bool PlayScript.IDynamicClass.__HasDynamicValue(string name) {{ if (__dynamicProps != null) {{ return __dynamicProps.__HasDynamicValue(name); }} return false; }} System.Collections.IEnumerable PlayScript.IDynamicClass.__GetDynamicNames() {{ if (__dynamicProps != null) {{ return __dynamicProps.__GetDynamicNames(); }} return null; }} }} }} ", PsConsts.PsRootNamespace, ((ITypeDefinition)cl).Namespace, cl.MemberName.Basename);
    }

    string fileStr = os.ToString();
    // The generated file is dropped beside the output assembly so it has a
    // stable path for debuggers and stack traces.
    var path = System.IO.Path.Combine (System.IO.Path.GetDirectoryName(System.IO.Path.GetFullPath(module.Compiler.Settings.OutputFile)), "dynamic.g.cs");
    System.IO.File.WriteAllText(path, fileStr);

    // NOTE(review): bytes are produced with ASCII but re-read as UTF-8;
    // identical for this ASCII-only template, but confirm if the template
    // ever gains non-ASCII characters.
    byte[] byteArray = Encoding.ASCII.GetBytes( fileStr );
    var input = new MemoryStream( byteArray, false );
    var reader = new SeekableStreamReader (input, System.Text.Encoding.UTF8);

    SourceFile file = new SourceFile(path, path, 0);
    file.FileType = SourceFileType.CSharp;

    Driver.Parse (reader, file, module, session, report);
}
//
// Processes "see" or "seealso" elements from cref attribute.
// The cref text is parsed with a throw-away C# parser and resolved against
// the member cache; the attribute is rewritten to the documentation ID of
// the match ("T:", "M:", ...) or to "!:" + cref when unresolved.
//
void HandleXrefCommon (MemberCore mc, TypeContainer ds, XmlElement xref)
{
	string cref = xref.GetAttribute ("cref");
	// when, XmlReader, "if (cref == null)"
	if (!xref.HasAttribute ("cref"))
		return;

	// Nothing to be resolved the reference is marked explicitly
	if (cref.Length > 2 && cref [1] == ':')
		return;

	// Additional symbols for < and > are allowed for easier XML typing
	cref = cref.Replace ('{', '<').Replace ('}', '>');

	var encoding = module.Compiler.Settings.Encoding;
	var s = new MemoryStream (encoding.GetBytes (cref));

	var source_file = new CompilationSourceFile (doc_module, mc.Location.SourceFile);
	var report = new Report (doc_module.Compiler, new NullReportPrinter ());

	if (session == null)
		session = new ParserSession () {
			UseJayGlobalArrays = true
		};

	SeekableStreamReader seekable = new SeekableStreamReader (s, encoding, session.StreamReaderBuffer);

	var parser = new CSharpParser (seekable, source_file, report, session);
	// Reset the out-fields the parser fills for documentation references.
	ParsedParameters = null;
	ParsedName = null;
	ParsedBuiltinType = null;
	ParsedOperator = null;
	// Switch the lexer into documentation-cref parsing mode.
	parser.Lexer.putback_char = Tokenizer.DocumentationXref;
	parser.Lexer.parsing_generic_declaration_doc = true;
	parser.parse ();
	if (report.Errors > 0) {
		Report.Warning (1584, 1, mc.Location, "XML comment on `{0}' has syntactically incorrect cref attribute `{1}'",
			mc.GetSignatureForError (), cref);

		xref.SetAttribute ("cref", "!:" + cref);
		return;
	}

	MemberSpec member;
	string prefix = null;
	FullNamedExpression fne = null;

	//
	// Try built-in type first because we are using ParsedName as identifier of
	// member names on built-in types
	//
	if (ParsedBuiltinType != null && (ParsedParameters == null || ParsedName != null)) {
		member = ParsedBuiltinType.Type;
	} else {
		member = null;
	}

	if (ParsedName != null || ParsedOperator.HasValue) {
		TypeSpec type = null;
		string member_name = null;

		if (member == null) {
			if (ParsedOperator.HasValue) {
				type = mc.CurrentType;
			} else if (ParsedName.Left != null) {
				fne = ResolveMemberName (mc, ParsedName.Left);
				if (fne != null) {
					var ns = fne as Namespace;
					if (ns != null) {
						fne = ns.LookupTypeOrNamespace (mc, ParsedName.Name, ParsedName.Arity, LookupMode.Probing, Location.Null);
						if (fne != null) {
							member = fne.Type;
						}
					} else {
						type = fne.Type;
					}
				}
			} else {
				fne = ResolveMemberName (mc, ParsedName);
				if (fne == null) {
					type = mc.CurrentType;
				} else if (ParsedParameters == null) {
					member = fne.Type;
				} else if (fne.Type.MemberDefinition == mc.CurrentType.MemberDefinition) {
					member_name = Constructor.ConstructorName;
					type = fne.Type;
				}
			}
		} else {
			type = (TypeSpec) member;
			member = null;
		}

		if (ParsedParameters != null) {
			// Resolve parameter types silently; failures only mean
			// "no match", they are not user diagnostics.
			var old_printer = mc.Module.Compiler.Report.SetPrinter (new NullReportPrinter ());
			try {
				var context = new DocumentationMemberContext (mc, ParsedName ?? MemberName.Null);

				foreach (var pp in ParsedParameters) {
					pp.Resolve (context);
				}
			} finally {
				mc.Module.Compiler.Report.SetPrinter (old_printer);
			}
		}

		if (type != null) {
			if (member_name == null)
				member_name = ParsedOperator.HasValue ?
					Operator.GetMetadataName (ParsedOperator.Value) : ParsedName.Name;

			// Conversion operators carry the return type as the trailing
			// "parameter"; exclude it from the positional count.
			int parsed_param_count;
			if (ParsedOperator == Operator.OpType.Explicit || ParsedOperator == Operator.OpType.Implicit) {
				parsed_param_count = ParsedParameters.Count - 1;
			} else if (ParsedParameters != null) {
				parsed_param_count = ParsedParameters.Count;
			} else {
				parsed_param_count = 0;
			}

			int parameters_match = -1;
			do {
				var members = MemberCache.FindMembers (type, member_name, true);
				if (members != null) {
					foreach (var m in members) {
						if (ParsedName != null && m.Arity != ParsedName.Arity)
							continue;

						if (ParsedParameters != null) {
							IParametersMember pm = m as IParametersMember;
							if (pm == null)
								continue;

							if (m.Kind == MemberKind.Operator && !ParsedOperator.HasValue)
								continue;

							var pm_params = pm.Parameters;

							int i;
							for (i = 0; i < parsed_param_count; ++i) {
								var pparam = ParsedParameters[i];

								if (i >= pm_params.Count || pparam == null || pparam.TypeSpec == null ||
									!TypeSpecComparer.Override.IsEqual (pparam.TypeSpec, pm_params.Types[i]) ||
									(pparam.Modifier & Parameter.Modifier.RefOutMask) != (pm_params.FixedParameters[i].ModFlags & Parameter.Modifier.RefOutMask)) {

									if (i > parameters_match) {
										parameters_match = i;
									}

									// Mismatch: remember how far we got and try
									// the next candidate.
									i = -1;
									break;
								}
							}

							if (i < 0)
								continue;

							if (ParsedOperator == Operator.OpType.Explicit || ParsedOperator == Operator.OpType.Implicit) {
								if (pm.MemberType != ParsedParameters[parsed_param_count].TypeSpec) {
									parameters_match = parsed_param_count + 1;
									continue;
								}
							} else {
								if (parsed_param_count != pm_params.Count)
									continue;
							}
						}

						if (member != null) {
							Report.Warning (419, 3, mc.Location, "Ambiguous reference in cref attribute `{0}'. Assuming `{1}' but other overloads including `{2}' have also matched",
								cref, member.GetSignatureForError (), m.GetSignatureForError ());
							break;
						}

						member = m;
					}
				}

				// Continue with parent type for nested types
				if (member == null) {
					type = type.DeclaringType;
				} else {
					type = null;
				}
			} while (type != null);

			if (member == null && parameters_match >= 0) {
				for (int i = parameters_match; i < parsed_param_count; ++i) {
					Report.Warning (1580, 1, mc.Location, "Invalid type for parameter `{0}' in XML comment cref attribute `{1}'",
						(i + 1).ToString (), cref);
				}

				if (parameters_match == parsed_param_count + 1) {
					Report.Warning (1581, 1, mc.Location, "Invalid return type in XML comment cref attribute `{0}'", cref);
				}
			}
		}
	}

	if (member == null) {
		Report.Warning (1574, 1, mc.Location, "XML comment on `{0}' has cref attribute `{1}' that could not be resolved",
			mc.GetSignatureForError (), cref);
		cref = "!:" + cref;
	} else if (member == InternalType.Namespace) {
		cref = "N:" + fne.GetSignatureForError ();
	} else {
		prefix = GetMemberDocHead (member);
		cref = prefix + member.GetSignatureForDocumentation ();
	}

	xref.SetAttribute ("cref", cref);
}
// Generates an "embed loader" class for every embed field found in the
// module ("embed.g.cs"), rewrites each field initializer to reference its
// loader, writes the file next to the output assembly and parses it into
// the module.
public static void GenerateEmbedClasses(ModuleContainer module, ParserSession session, Report report)
{
    List<EmbedData> embeds = new List<EmbedData>();
    FindEmbedClasses(module, module, embeds);
    if (embeds.Count == 0)
        return;

    var os = new StringWriter();
    os.Write (@" // Generated embed loader classes ");
    foreach (var e in embeds) {
        var loc = e._field.Location;
        // Point the field at the loader class generated below.
        e._field.Initializer = new TypeOf(new MemberAccess(new SimpleName("_embed_loaders", loc), e._className), loc);
        // NOTE(review): format placeholder {0} (PsRootNamespace) is unused
        // by this template — confirm that is intentional.
        os.Write (@" namespace _embed_loaders {{ internal class {1} : PlayScript.EmbedLoader {{ public {1}() : base({2}, {3}, {4}, {5}, {6}) {{ }} }} }} ", PsConsts.PsRootNamespace, e._className, e.source, e.mimeType, e.embedAsCFF, e.fontFamily, e.symbol);
    }

    string fileStr = os.ToString();
    // Dropped beside the output assembly so it has a stable path.
    var path = System.IO.Path.Combine (System.IO.Path.GetDirectoryName(System.IO.Path.GetFullPath(module.Compiler.Settings.OutputFile)), "embed.g.cs");
    System.IO.File.WriteAllText(path, fileStr);

    // NOTE(review): bytes are produced with ASCII but re-read as UTF-8;
    // equivalent for ASCII-only templates.
    byte[] byteArray = Encoding.ASCII.GetBytes( fileStr );
    var input = new MemoryStream( byteArray, false );
    var reader = new SeekableStreamReader (input, System.Text.Encoding.UTF8);
    SourceFile file = new SourceFile(path, path, 0);
    file.FileType = SourceFileType.CSharp;
    Driver.Parse (reader, file, module, session, report);
}
// Parses or tokenizes all sources; PlayScript sources additionally require
// a post-parse code generation pass.
void Parse (ModuleContainer module)
{
	var sources = module.Compiler.SourceFiles;
	Location.Initialize (sources);

	var session = new ParserSession () {
		UseJayGlobalArrays = true,
		LocatedTokens = new Tokenizer.LocatedToken[15000],
		AsLocatedTokens = new Mono.PlayScript.Tokenizer.LocatedToken[15000]
	};

	bool tokenize_only = module.Compiler.Settings.TokenizeOnly;
	bool seen_playscript = false;

	for (int i = 0; i < sources.Count; ++i) {
		var src = sources[i];

		if (tokenize_only)
			tokenize_file (src, module, session);
		else
			Parse (src, module, session, Report);

		if (src.FileType == SourceFileType.PlayScript)
			seen_playscript = true;
	}

	// PlayScript needs to add generated code after parsing.
	if (seen_playscript)
		Mono.PlayScript.CodeGenerator.GenerateCode (module, session, Report);
}
// Parses every registered source file into its own compilation unit.
// Streams are read as UTF-8 and released deterministically via `using'.
private void ParseFiles([NotNull] ModuleContainer container)
{
    Location.Initialize(container.Compiler.SourceFiles);

    var session = new ParserSession {
        UseJayGlobalArrays = true,
        LocatedTokens = new LocatedToken[15000]
    };

    foreach (var sourceFile in container.Compiler.SourceFiles) {
        using (var stream = new FileStream(sourceFile.FullPathName, FileMode.Open)) {
            var unit = new CompilationSourceFile(container, sourceFile);
            container.AddTypeContainer(unit);

            using (var reader = new SeekableStreamReader(stream, Encoding.UTF8)) {
                var parser = new CSharpParser(reader, unit, _report, session);
                parser.parse();
            }
        }
    }
}
//
// Main constructor: wires the parser to its compilation unit, pulls the
// language settings from the file's compiler and shares parser state
// (locations bag, jay global stacks, parameter buckets) via `session'.
//
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, Report report, ParserSession session)
{
	this.file = file;
	// The compilation unit doubles as the initial container and namespace.
	current_container = current_namespace = file;

	this.module = file.Module;
	this.compiler = file.Compiler;
	this.settings = compiler.Settings;
	this.report = report;

	lang_version = settings.Version;
	yacc_verbose_flag = settings.VerboseParserFlag;
	doc_support = settings.DocumentationFile != null;
	lexer = new Tokenizer (reader, file, session);
	oob_stack = new Stack<object> ();
	lbag = session.LocationsBag;
	use_global_stacks = session.UseJayGlobalArrays;
	parameters_bucket = session.ParametersStack;
}
// Creates the compilation unit and dispatches to the C# or PlayScript
// parser based on the source file type. (`report' is accepted for
// signature compatibility; parsers report via the session/file.)
public static void Parse(SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report)
{
    var unit = new CompilationSourceFile(module, sourceFile);
    module.AddTypeContainer(unit);

    if (sourceFile.FileType != SourceFileType.CSharp) {
        var psParser = new PlayScriptParser(reader, unit, session);
        psParser.parsing_playscript = sourceFile.PsExtended;
        psParser.parse();
        return;
    }

    new CSharpParser(reader, unit, session).parse();
}
/*
/// <summary>
/// Parses a file snippet; guessing what the code snippet represents (whole file, type members, block, type reference, expression).
/// </summary>
public AstNode ParseSnippet (string code)
{
	// TODO: add support for parsing a part of a file
	throw new NotImplementedException ();
}
*/

// Parses a documentation cref string (as found in /// <see cref="..."/>)
// into a DocumentationReference AST node.
public DocumentationReference ParseDocumentationReference(string cref)
{
	// see Mono.CSharp.DocumentationBuilder.HandleXrefCommon
	if (cref == null)
		throw new ArgumentNullException("cref");

	// Additional symbols for < and > are allowed for easier XML typing
	cref = cref.Replace('{', '<').Replace('}', '>');

	// The underlying Mono compiler state is global; serialize all parses.
	lock (parseLock) {
		errorReportPrinter = new ErrorReportPrinter("");
		var ctx = new CompilerContext(compilerSettings.ToMono(), errorReportPrinter);
		ctx.Settings.TabSize = 1;
		var reader = new SeekableStreamReader(new StringTextSource(cref));
		var file = new SourceFile("", "", 0);
		Location.Initialize(new List<SourceFile>(new [] { file }));
		var module = new ModuleContainer(ctx);
		module.DocumentationBuilder = new DocumentationBuilder(module);
		var source_file = new CompilationSourceFile(module);
		var report = new Report(ctx, errorReportPrinter);
		var session = new ParserSession();
		session.LocationsBag = new LocationsBag();
		var parser = new Mono.CSharp.CSharpParser(reader, source_file, report, session);
		// Offset locations so they are relative to the host document, and
		// switch the lexer into documentation-reference mode.
		parser.Lexer.Line += initialLocation.Line - 1;
		parser.Lexer.Column += initialLocation.Column - 1;
		parser.Lexer.putback_char = Tokenizer.DocumentationXref;
		parser.Lexer.parsing_generic_declaration_doc = true;
		parser.parse();
		if (report.Errors > 0) {
			// Report.Warning (1584, 1, mc.Location, "XML comment on `{0}' has syntactically incorrect cref attribute `{1}'",
			// mc.GetSignatureForError (), cref);
		}
		var conversionVisitor = new ConversionVisitor(false, session.LocationsBag);
		var docRef = conversionVisitor.ConvertXmlDoc(module.DocumentationBuilder);
		// Release the global compiler state for the next caller.
		CompilerCallableEntryPoint.Reset();
		return docRef;
	}
}
// Creates the compilation unit for `sourceFile' and runs whichever parser
// matches its file type (C# or PlayScript). (`report' is accepted for
// signature compatibility; parsers report via the session/file.)
public static void Parse (SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report)
{
	var file = new CompilationSourceFile (module, sourceFile);
	module.AddTypeContainer (file);

	switch (sourceFile.FileType) {
	case SourceFileType.CSharp:
		new CSharpParser (reader, file, session).parse ();
		break;
	default:
		var ps = new PlayScriptParser (reader, file, session);
		ps.parsing_playscript = sourceFile.PsExtended;
		ps.parse ();
		break;
	}
}
// Parses `program' into a SyntaxTree. initialLine/initialColumn offset the
// reported locations so embedded code maps back to its host document.
// Serialized on parseLock because the underlying Mono compiler state is
// global.
SyntaxTree Parse(ITextSource program, string fileName, int initialLine, int initialColumn)
{
	lock (parseLock) {
		errorReportPrinter = new ErrorReportPrinter("");
		var ctx = new CompilerContext(compilerSettings.ToMono(), errorReportPrinter);
		ctx.Settings.TabSize = 1;
		var reader = new SeekableStreamReader(program);
		var file = new SourceFile(fileName, fileName, 0);
		Location.Initialize(new List<SourceFile>(new [] { file }));
		var module = new ModuleContainer(ctx);
		var session = new ParserSession();
		session.LocationsBag = new LocationsBag();
		var report = new Report(ctx, errorReportPrinter);
		// Driver.Parse applies the (0-based) line/column modifiers to the lexer.
		var parser = Driver.Parse(reader, file, module, session, report, initialLine - 1, initialColumn - 1);

		// Bundle everything the converter needs to build the tree.
		var top = new CompilerCompilationUnit {
			ModuleCompiled = module,
			LocationsBag = session.LocationsBag,
			SpecialsBag = parser.Lexer.sbag,
			Conditionals = parser.Lexer.SourceFile.Conditionals
		};
		var unit = Parse(top, fileName);
		unit.Errors.AddRange(errorReportPrinter.Errors);
		// Release the global compiler state for the next caller.
		CompilerCallableEntryPoint.Reset();
		return unit;
	}
}
// Parses `reader' and returns the parser instance so callers can inspect
// its lexer; the optional modifiers shift reported locations for code
// embedded in another document.
public static CSharpParser Parse(SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report, int lineModifier = 0, int colModifier = 0)
{
	var unit = new CompilationSourceFile (module, sourceFile);
	module.AddTypeContainer(unit);

	var parser = new CSharpParser (reader, unit, report, session);

	var lexer = parser.Lexer;
	lexer.sbag = new SpecialsBag ();
	lexer.Line += lineModifier;
	lexer.Column += colModifier;

	parser.parse ();
	return parser;
}