void tokenize_file(SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    Stream input;

    try {
        input = File.OpenRead(sourceFile.Name);
    } catch {
        Report.Error(2001, "Source file `" + sourceFile.Name + "' could not be found");
        return;
    }

    using (input) {
        SeekableStreamReader reader = new SeekableStreamReader(input, ctx.Settings.Encoding);
        var file = new CompilationSourceFile(module, sourceFile);

        Tokenizer lexer = new Tokenizer(reader, file, session, ctx.Report);
        int token, tokens = 0, errors = 0;

        while ((token = lexer.token()) != Token.EOF) {
            tokens++;
            if (token == Token.ERROR)
                errors++;
        }

        Console.WriteLine("Tokenized: " + tokens + " found " + errors + " errors");
    }

    return;
}
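A minimal sketch of the same token loop driven over an in-memory snippet instead of a file on disk. It assumes the driver context used by tokenize_file above (a ctx CompilerContext plus an already registered SourceFile, ModuleContainer and ParserSession in scope); CountTokens is a hypothetical helper name, not part of the Mono driver.

// Hypothetical helper: count tokens in a string using the same Tokenizer loop.
int CountTokens(string snippet, SourceFile sourceFile, ModuleContainer module, ParserSession session)
{
    var encoding = ctx.Settings.Encoding;
    using (var stream = new MemoryStream(encoding.GetBytes(snippet))) {
        var reader = new SeekableStreamReader(stream, encoding);
        var file = new CompilationSourceFile(module, sourceFile);

        var lexer = new Tokenizer(reader, file, session, ctx.Report);
        int token, count = 0;
        while ((token = lexer.token()) != Token.EOF)
            count++;

        return count;
    }
}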
public void Parse(SourceFile file, ModuleContainer module, ParserSession session, Report report)
{
    Stream input;

    try {
        input = File.OpenRead(file.Name);
    } catch {
        report.Error(2001, "Source file `{0}' could not be found", file.Name);
        return;
    }

    // Check 'MZ' header
    if (input.ReadByte() == 77 && input.ReadByte() == 90) {
        report.Error(2015, "Source file `{0}' is a binary file and not a text file", file.Name);
        input.Close();
        return;
    }

    input.Position = 0;
    SeekableStreamReader reader = new SeekableStreamReader(input, ctx.Settings.Encoding, session.StreamReaderBuffer);

    Parse(reader, file, module, session, report);

    if (ctx.Settings.GenerateDebugInfo && report.Errors == 0 && !file.HasChecksum) {
        input.Position = 0;
        var checksum = session.GetChecksumAlgorithm();
        file.SetChecksum(checksum.ComputeHash(input));
    }

    reader.Dispose();
    input.Close();
}
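The 'MZ' test above is the DOS/PE magic-number check: 77 and 90 are the ASCII codes of 'M' and 'Z', so a compiled .exe/.dll handed in as a source file is rejected with error 2015 instead of being fed to the lexer. A small self-contained sketch of the same check (LooksLikeBinary is a hypothetical helper; the real driver inlines this test):

// Returns true when the stream starts with the DOS/PE magic "MZ" (0x4D 0x5A).
static bool LooksLikeBinary(Stream input)
{
    int first = input.ReadByte();
    int second = input.ReadByte();
    input.Position = 0;                // rewind so the caller can still read the text
    return first == 'M' && second == 'Z';
}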
public static CSharpParser Parse(SeekableStreamReader reader, SourceFile sourceFile, ModuleContainer module, ParserSession session, Report report, int lineModifier = 0, int colModifier = 0)
{
    var file = new CompilationSourceFile(module, sourceFile);
    module.AddTypeContainer(file);

    CSharpParser parser = new CSharpParser(reader, file, report, session);
    parser.Lexer.Line += lineModifier;
    parser.Lexer.Column += colModifier;
    parser.Lexer.sbag = new SpecialsBag();
    parser.parse();
    return parser;
}
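The lineModifier/colModifier offsets exist so that a snippet embedded in a larger document reports Locations relative to that document. A hedged sketch of the calling pattern, mirroring the NRefactory wrapper shown later in this listing; reader, file, module, session, report, hostLine and hostColumn are assumed to be set up as in that wrapper:

// Sketch: parse a snippet that sits at (hostLine, hostColumn) of a host document,
// so that diagnostics and AST locations line up with the host coordinates.
var parser = Driver.Parse(reader, file, module, session, report,
                          hostLine - 1, hostColumn - 1);
var specials = parser.Lexer.sbag;   // comments/preprocessor directives collected during the parse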
//
// Parses the string @input and returns a CSharpParser if successful.
//
// If @mode is Silent or GetCompletions, no errors are reported to the
// user. This is used to call the parser repeatedly and check whether
// the expression is parsable at all.
//
// @partial_input: set to true when the parsed expression was only
// partial and more input is needed.
//
CSharpParser ParseString(ParseMode mode, string input, out bool partial_input)
{
    partial_input = false;
    Reset();

    var enc = ctx.Settings.Encoding;
    var s = new MemoryStream(enc.GetBytes(input));
    SeekableStreamReader seekable = new SeekableStreamReader(s, enc);

    InputKind kind = ToplevelOrStatement(seekable);
    if (kind == InputKind.Error) {
        if (mode == ParseMode.ReportErrors)
            ctx.Report.Error(-25, "Detection Parsing Error");
        partial_input = false;
        return null;
    }

    if (kind == InputKind.EOF) {
        if (mode == ParseMode.ReportErrors)
            Console.Error.WriteLine("Internal error: EOF condition should have been detected in a previous call with silent=true");
        partial_input = true;
        return null;
    }

    seekable.Position = 0;
    source_file.DeclarationFound = false;
    CSharpParser parser = new CSharpParser(seekable, source_file, new ParserSession());

    if (kind == InputKind.StatementOrExpression) {
        parser.Lexer.putback_char = Tokenizer.EvalStatementParserCharacter;
        parser.Lexer.parsing_block++;
        ctx.Settings.StatementMode = true;
    } else {
        parser.Lexer.putback_char = Tokenizer.EvalCompilationUnitParserCharacter;
        ctx.Settings.StatementMode = false;
    }

    if (mode == ParseMode.GetCompletions)
        parser.Lexer.CompleteOnEOF = true;

    ReportPrinter old_printer = null;
    if (mode == ParseMode.Silent || mode == ParseMode.GetCompletions)
        old_printer = ctx.Report.SetPrinter(new StreamReportPrinter(TextWriter.Null));

    try {
        parser.parse();
    } finally {
        if (ctx.Report.Errors != 0) {
            if (mode != ParseMode.ReportErrors && parser.UnexpectedEOF)
                partial_input = true;

            if (parser.undo != null)
                parser.undo.ExecuteUndo();

            parser = null;
        }

        if (old_printer != null)
            ctx.Report.SetPrinter(old_printer);
    }

    return parser;
}
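A hypothetical REPL-style calling pattern for ParseString, assuming it is invoked from within the same evaluator class (the method is not public). TryParseForRepl is an invented name and the real Mono evaluator sequences these modes differently, so treat this only as an illustration of the mode/partial_input contract:

// First probe silently to decide whether the input is complete; only when it is
// complete but still fails do we parse again with errors shown to the user.
CSharpParser TryParseForRepl(string input, out bool needMoreInput)
{
    var parser = ParseString(ParseMode.Silent, input, out needMoreInput);
    if (needMoreInput)
        return null;            // caller should read another line and retry

    if (parser != null)
        return parser;          // parsed cleanly on the silent pass

    // Complete but invalid: parse once more so the errors reach the user.
    return ParseString(ParseMode.ReportErrors, input, out needMoreInput);
}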
//
// Disambiguates the input string to determine whether we
// want to process a statement or a compilation unit.
//
// This is done using a top-down predictive parser,
// since the yacc/jay parser cannot disambiguate this
// with a single token of lookahead. There are very
// few ambiguities.
//
InputKind ToplevelOrStatement(SeekableStreamReader seekable)
{
    Tokenizer tokenizer = new Tokenizer(seekable, source_file, new ParserSession(), ctx.Report);

    // Prefer contextual block keywords over identifiers
    tokenizer.parsing_block++;

    int t = tokenizer.token();
    switch (t) {
    case Token.EOF:
        return InputKind.EOF;

    // These are toplevels
    case Token.EXTERN:
    case Token.OPEN_BRACKET:
    case Token.ABSTRACT:
    case Token.CLASS:
    case Token.ENUM:
    case Token.INTERFACE:
    case Token.INTERNAL:
    case Token.NAMESPACE:
    case Token.PRIVATE:
    case Token.PROTECTED:
    case Token.PUBLIC:
    case Token.SEALED:
    case Token.STATIC:
    case Token.STRUCT:
        return InputKind.CompilationUnit;

    // Definitely expression
    case Token.FIXED:
    case Token.BOOL:
    case Token.BYTE:
    case Token.CHAR:
    case Token.DECIMAL:
    case Token.DOUBLE:
    case Token.FLOAT:
    case Token.INT:
    case Token.LONG:
    case Token.NEW:
    case Token.OBJECT:
    case Token.SBYTE:
    case Token.SHORT:
    case Token.STRING:
    case Token.UINT:
    case Token.ULONG:
        return InputKind.StatementOrExpression;

    // These need disambiguation help
    case Token.USING:
        t = tokenizer.token();
        if (t == Token.EOF)
            return InputKind.EOF;

        if (t == Token.IDENTIFIER)
            return InputKind.CompilationUnit;

        return InputKind.StatementOrExpression;

    // Distinguish between:
    //    delegate opt_anonymous_method_signature block
    //    delegate type
    case Token.DELEGATE:
        t = tokenizer.token();
        if (t == Token.EOF)
            return InputKind.EOF;

        if (t == Token.OPEN_PARENS || t == Token.OPEN_BRACE)
            return InputKind.StatementOrExpression;

        return InputKind.CompilationUnit;

    // Distinguish between:
    //    unsafe block
    //    unsafe as modifier of a type declaration
    case Token.UNSAFE:
        t = tokenizer.token();
        if (t == Token.EOF)
            return InputKind.EOF;

        if (t == Token.OPEN_PARENS)
            return InputKind.StatementOrExpression;

        return InputKind.CompilationUnit;

    // These are errors: we list explicitly what we had
    // from the grammar, ERROR and then everything else
    case Token.READONLY:
    case Token.OVERRIDE:
    case Token.ERROR:
        return InputKind.Error;

    // This catches everything else allowed by
    // expressions. We could add one-by-one use cases
    // if needed.
    default:
        return InputKind.StatementOrExpression;
    }
}
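An illustrative probe of the classification, assuming it runs inside the same evaluator class (ctx and source_file in scope). Classify is a hypothetical helper, and the expected results below follow from the switch above rather than from a test suite:

// Hypothetical helper that classifies a raw string exactly the way ParseString does
// before deciding whether to parse it as a statement or as a compilation unit.
InputKind Classify(string input)
{
    var enc = ctx.Settings.Encoding;
    var stream = new MemoryStream(enc.GetBytes(input));
    return ToplevelOrStatement(new SeekableStreamReader(stream, enc));
}

// Expected classifications (illustrative):
//   Classify("class C { }")          -> InputKind.CompilationUnit      (CLASS is a top-level keyword)
//   Classify("using System;")        -> InputKind.CompilationUnit      (USING followed by IDENTIFIER)
//   Classify("using (x) { }")        -> InputKind.StatementOrExpression (USING followed by OPEN_PARENS)
//   Classify("delegate (int a) { }") -> InputKind.StatementOrExpression (anonymous method)
//   Classify("1 + 2")                -> InputKind.StatementOrExpression (default case)
//   Classify("override")             -> InputKind.Error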
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, Report report, ParserSession session)
{
    this.file = file;
    current_container = current_namespace = file;

    this.module = file.Module;
    this.compiler = file.Compiler;
    this.settings = compiler.Settings;
    this.report = report;

    lang_version = settings.Version;
    yacc_verbose_flag = settings.VerboseParserFlag;
    doc_support = settings.DocumentationFile != null;
    lexer = new Tokenizer (reader, file, session, report);
    oob_stack = new Stack<object> ();
    lbag = session.LocationsBag;
    use_global_stacks = session.UseJayGlobalArrays;
    parameters_bucket = session.ParametersStack;
}
public CSharpParser (SeekableStreamReader reader, CompilationSourceFile file, ParserSession session)
    : this (reader, file, file.Compiler.Report, session)
{
}
//
// Processes "see" or "seealso" elements from cref attribute.
//
void HandleXrefCommon(MemberCore mc, XmlElement xref)
{
    string cref = xref.GetAttribute("cref");
    // when, XmlReader, "if (cref == null)"
    if (!xref.HasAttribute("cref"))
        return;

    // Nothing to be resolved, the reference is marked explicitly
    if (cref.Length > 2 && cref[1] == ':')
        return;

    // Additional symbols for < and > are allowed for easier XML typing
    cref = cref.Replace('{', '<').Replace('}', '>');

    var encoding = module.Compiler.Settings.Encoding;
    var s = new MemoryStream(encoding.GetBytes(cref));

    var source_file = new CompilationSourceFile(doc_module, mc.Location.SourceFile);
    var report = new Report(doc_module.Compiler, new NullReportPrinter());

    if (session == null)
        session = new ParserSession {
            UseJayGlobalArrays = true
        };

    SeekableStreamReader seekable = new SeekableStreamReader(s, encoding, session.StreamReaderBuffer);

    var parser = new CSharpParser(seekable, source_file, report, session);
    ParsedParameters = null;
    ParsedName = null;
    ParsedBuiltinType = null;
    ParsedOperator = null;
    parser.Lexer.putback_char = Tokenizer.DocumentationXref;
    parser.Lexer.parsing_generic_declaration_doc = true;
    parser.parse();

    if (report.Errors > 0) {
        Report.Warning(1584, 1, mc.Location, "XML comment on `{0}' has syntactically incorrect cref attribute `{1}'",
            mc.GetSignatureForError(), cref);

        xref.SetAttribute("cref", "!:" + cref);
        return;
    }

    MemberSpec member;
    string prefix = null;
    FullNamedExpression fne = null;

    //
    // Try built-in type first because we are using ParsedName as identifier of
    // member names on built-in types
    //
    if (ParsedBuiltinType != null && (ParsedParameters == null || ParsedName != null)) {
        member = ParsedBuiltinType.Type;
    } else {
        member = null;
    }

    if (ParsedName != null || ParsedOperator.HasValue) {
        TypeSpec type = null;
        string member_name = null;

        if (member == null) {
            if (ParsedOperator.HasValue) {
                type = mc.CurrentType;
            } else if (ParsedName.Left != null) {
                fne = ResolveMemberName(mc, ParsedName.Left);
                if (fne != null) {
                    var ns = fne as NamespaceExpression;
                    if (ns != null) {
                        fne = ns.LookupTypeOrNamespace(mc, ParsedName.Name, ParsedName.Arity, LookupMode.Probing, Location.Null);
                        if (fne != null) {
                            member = fne.Type;
                        }
                    } else {
                        type = fne.Type;
                    }
                }
            } else {
                fne = ResolveMemberName(mc, ParsedName);
                if (fne == null) {
                    type = mc.CurrentType;
                } else if (ParsedParameters == null) {
                    member = fne.Type;
                } else if (fne.Type.MemberDefinition == mc.CurrentType.MemberDefinition) {
                    member_name = Constructor.ConstructorName;
                    type = fne.Type;
                }
            }
        } else {
            type = (TypeSpec) member;
            member = null;
        }

        if (ParsedParameters != null) {
            var old_printer = mc.Module.Compiler.Report.SetPrinter(new NullReportPrinter());
            try {
                var context = new DocumentationMemberContext(mc, ParsedName ?? MemberName.Null);

                foreach (var pp in ParsedParameters) {
                    pp.Resolve(context);
                }
            } finally {
                mc.Module.Compiler.Report.SetPrinter(old_printer);
            }
        }

        if (type != null) {
            if (member_name == null)
                member_name = ParsedOperator.HasValue ?
                    Operator.GetMetadataName(ParsedOperator.Value) : ParsedName.Name;

            int parsed_param_count;
            if (ParsedOperator == Operator.OpType.Explicit || ParsedOperator == Operator.OpType.Implicit) {
                parsed_param_count = ParsedParameters.Count - 1;
            } else if (ParsedParameters != null) {
                parsed_param_count = ParsedParameters.Count;
            } else {
                parsed_param_count = 0;
            }

            int parameters_match = -1;
            do {
                var members = MemberCache.FindMembers(type, member_name, true);
                if (members != null) {
                    foreach (var m in members) {
                        if (ParsedName != null && m.Arity != ParsedName.Arity)
                            continue;

                        if (ParsedParameters != null) {
                            IParametersMember pm = m as IParametersMember;
                            if (pm == null)
                                continue;

                            if (m.Kind == MemberKind.Operator && !ParsedOperator.HasValue)
                                continue;

                            var pm_params = pm.Parameters;

                            int i;
                            for (i = 0; i < parsed_param_count; ++i) {
                                var pparam = ParsedParameters[i];

                                if (i >= pm_params.Count || pparam == null || pparam.TypeSpec == null ||
                                    !TypeSpecComparer.Override.IsEqual(pparam.TypeSpec, pm_params.Types[i]) ||
                                    (pparam.Modifier & Parameter.Modifier.RefOutMask) != (pm_params.FixedParameters[i].ModFlags & Parameter.Modifier.RefOutMask)) {

                                    if (i > parameters_match) {
                                        parameters_match = i;
                                    }

                                    i = -1;
                                    break;
                                }
                            }

                            if (i < 0)
                                continue;

                            if (ParsedOperator == Operator.OpType.Explicit || ParsedOperator == Operator.OpType.Implicit) {
                                if (pm.MemberType != ParsedParameters[parsed_param_count].TypeSpec) {
                                    parameters_match = parsed_param_count + 1;
                                    continue;
                                }
                            } else {
                                if (parsed_param_count != pm_params.Count)
                                    continue;
                            }
                        }

                        if (member != null) {
                            Report.Warning(419, 3, mc.Location,
                                "Ambiguous reference in cref attribute `{0}'. Assuming `{1}' but other overloads including `{2}' have also matched",
                                cref, member.GetSignatureForError(), m.GetSignatureForError());
                            break;
                        }

                        member = m;
                    }
                }

                // Continue with parent type for nested types
                if (member == null) {
                    type = type.DeclaringType;
                } else {
                    type = null;
                }
            } while (type != null);

            if (member == null && parameters_match >= 0) {
                for (int i = parameters_match; i < parsed_param_count; ++i) {
                    Report.Warning(1580, 1, mc.Location, "Invalid type for parameter `{0}' in XML comment cref attribute `{1}'",
                        (i + 1).ToString(), cref);
                }

                if (parameters_match == parsed_param_count + 1) {
                    Report.Warning(1581, 1, mc.Location, "Invalid return type in XML comment cref attribute `{0}'", cref);
                }
            }
        }
    }

    if (member == null) {
        Report.Warning(1574, 1, mc.Location, "XML comment on `{0}' has cref attribute `{1}' that could not be resolved",
            mc.GetSignatureForError(), cref);
        cref = "!:" + cref;
    } else if (member == InternalType.Namespace) {
        cref = "N:" + fne.GetSignatureForError();
    } else {
        prefix = GetMemberDocHead(member);
        cref = prefix + member.GetSignatureForDocumentation();
    }

    xref.SetAttribute("cref", cref);
}
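An illustrative-only reduction of the early cref normalization steps above into a standalone helper (NormalizeCref is a hypothetical name); the actual member resolution is performed by the embedded CSharpParser run and the MemberCache lookup that follow in the method:

// Normalize a cref string the way HandleXrefCommon does before parsing it.
static string NormalizeCref(string cref)
{
    // Already prefixed ("T:", "M:", "!:", ...): nothing to resolve.
    if (cref.Length > 2 && cref[1] == ':')
        return cref;

    // '{' and '}' are accepted as easier-to-type stand-ins for '<' and '>'.
    return cref.Replace('{', '<').Replace('}', '>');
}

// NormalizeCref("T:System.String")    -> "T:System.String"    (left untouched)
// NormalizeCref("List{int}.Add(int)") -> "List<int>.Add(int)" (then fed to the parser)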
/*
/// <summary>
/// Parses a file snippet; guessing what the code snippet represents (whole file, type members, block, type reference, expression).
/// </summary>
public AstNode ParseSnippet (string code)
{
    // TODO: add support for parsing a part of a file
    throw new NotImplementedException ();
}
*/

public DocumentationReference ParseDocumentationReference(string cref)
{
    // see ICSharpCode.NRefactory.MonoCSharp.DocumentationBuilder.HandleXrefCommon
    if (cref == null)
        throw new ArgumentNullException("cref");

    // Additional symbols for < and > are allowed for easier XML typing
    cref = cref.Replace('{', '<').Replace('}', '>');

    lock (parseLock) {
        errorReportPrinter = new ErrorReportPrinter("");
        var ctx = new CompilerContext(compilerSettings.ToMono(), errorReportPrinter);
        ctx.Settings.TabSize = 1;
        var reader = new SeekableStreamReader(new StringTextSource(cref));
        var file = new SourceFile("", "", 0);
        Location.Initialize(new List<SourceFile>(new [] { file }));
        var module = new ModuleContainer(ctx);
        module.DocumentationBuilder = new DocumentationBuilder(module);
        var source_file = new CompilationSourceFile(module);
        var report = new Report(ctx, errorReportPrinter);
        var session = new ParserSession();
        session.LocationsBag = new LocationsBag();
        var parser = new ICSharpCode.NRefactory.MonoCSharp.CSharpParser(reader, source_file, report, session);
        parser.Lexer.Line += initialLocation.Line - 1;
        parser.Lexer.Column += initialLocation.Column - 1;
        parser.Lexer.putback_char = Tokenizer.DocumentationXref;
        parser.Lexer.parsing_generic_declaration_doc = true;
        parser.parse();
        if (report.Errors > 0) {
            // Report.Warning (1584, 1, mc.Location, "XML comment on `{0}' has syntactically incorrect cref attribute `{1}'",
            //     mc.GetSignatureForError (), cref);
        }
        var conversionVisitor = new ConversionVisitor(false, session.LocationsBag);
        var docRef = conversionVisitor.ConvertXmlDoc(module.DocumentationBuilder);
        CompilerCallableEntryPoint.Reset();
        return docRef;
    }
}
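A hedged usage sketch, assuming ParseDocumentationReference is reached through NRefactory's public CSharpParser with its parameterless constructor; the cref string and the comment on the result are illustrative:

// Turn the textual cref of a <see>/<seealso> element into a DocumentationReference AST node.
var parser = new ICSharpCode.NRefactory.CSharp.CSharpParser();
DocumentationReference docRef = parser.ParseDocumentationReference("List{T}.Add(T)");
// docRef now describes the referenced member; '{'/'}' were accepted in place of '<'/'>'.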
SyntaxTree Parse(ITextSource program, string fileName, int initialLine, int initialColumn)
{
    lock (parseLock) {
        errorReportPrinter = new ErrorReportPrinter("");
        var ctx = new CompilerContext(compilerSettings.ToMono(), errorReportPrinter);
        ctx.Settings.TabSize = 1;
        var reader = new SeekableStreamReader(program);
        var file = new SourceFile(fileName, fileName, 0);
        Location.Initialize(new List<SourceFile>(new [] { file }));
        var module = new ModuleContainer(ctx);
        var session = new ParserSession();
        session.LocationsBag = new LocationsBag();
        var report = new Report(ctx, errorReportPrinter);
        var parser = Driver.Parse(reader, file, module, session, report, initialLine - 1, initialColumn - 1);

        var top = new CompilerCompilationUnit {
            ModuleCompiled = module,
            LocationsBag = session.LocationsBag,
            SpecialsBag = parser.Lexer.sbag,
            Conditionals = parser.Lexer.SourceFile.Conditionals
        };
        var unit = Parse(top, fileName);
        unit.Errors.AddRange(errorReportPrinter.Errors);
        CompilerCallableEntryPoint.Reset();
        return unit;
    }
}
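A hedged usage sketch of the public entry point that funnels into this private overload, assuming NRefactory's CSharpParser.Parse(string, fileName) API and the SyntaxTree.Errors collection populated above:

// Parse a source string into a SyntaxTree and dump any parse errors it collected.
var parser = new ICSharpCode.NRefactory.CSharp.CSharpParser();
SyntaxTree tree = parser.Parse("class C { void M() { } }", "demo.cs");
foreach (var error in tree.Errors)
    Console.WriteLine(error.Message);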