/// <summary>
/// Drives the full front-end pipeline for one source file: lexing, optional
/// preprocessing, ANTLR parsing (fast SLL first, falling back to LL with full
/// error reporting), error analysis, and transformation of the ANTLR tree into
/// a Roslyn <see cref="CompilationUnitSyntax"/>.
/// </summary>
/// <returns>
/// The compilation unit. When any stage failed, the errors are attached as
/// leading skipped-syntax trivia on the EOF token and the tree transform is
/// skipped (<c>hasErrors</c> path), so callers always get a syntactically
/// valid unit carrying the diagnostics.
/// </returns>
internal CompilationUnitSyntax ParseCompilationUnitCore()
{
#if DEBUG && DUMP_TIMES
    DateTime t = DateTime.Now;
#endif
    if (_options.ShowIncludes)
    {
        _options.ConsoleOutput.WriteLine("Compiling {0}", _fileName);
    }
    var sourceText = _text.ToString();
    XSharpLexer lexer = null;
    XSharpPreprocessor pp = null;          // stays null for MacroScript, or when lexing throws
    XSharpParserRuleContext tree = new XSharpParserRuleContext();
    XSharpParser parser = null;
    var parseErrors = ParseErrorData.NewBag();
    try
    {
        lexer = XSharpLexer.Create(sourceText, _fileName, _options);
        lexer.Options = _options;
        _lexerTokenStream = lexer.GetTokenStream();
    }
    catch (Exception e)
    {
        // Exception during Lexing
        parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
        // create empty token stream so we can continue the rest of the code
        _lexerTokenStream = new BufferedTokenStream(new XSharpListTokenSource(lexer, new List<IToken>()));
    }
#if DEBUG && DUMP_TIMES
    {
        var ts = DateTime.Now - t;
        t += ts;
        Debug.WriteLine("Lexing completed in {0}", ts);
    }
#endif
    // do not pre-process when there were lexer exceptions
    if (lexer != null && parseErrors.Count == 0)
    {
        foreach (var e in lexer.LexErrors)
        {
            parseErrors.Add(e);
        }
        BufferedTokenStream ppStream = null;
        try
        {
            // Check for #pragma in the lexerTokenStream
            _lexerTokenStream.Fill();
            if (!_options.MacroScript)
            {
                pp = new XSharpPreprocessor(lexer, _lexerTokenStream, _options, _fileName, _text.Encoding, _text.ChecksumAlgorithm, parseErrors);
            }
            var mustPreprocess = !_options.MacroScript && (lexer.HasPreprocessorTokens || !_options.NoStdDef);
            if (mustPreprocess)
            {
                var ppTokens = pp.PreProcess();
                ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, ppTokens));
            }
            else
            {
                // No Standard Defs and no preprocessor tokens in the lexer
                // so we bypass the preprocessor and use the lexer tokenstream
                // but if a .ppo is required we must use the preprocessor to
                // write the source text to the .ppo file
                if (_options.PreprocessorOutput && pp != null)
                {
                    pp.writeToPPO(sourceText, false);
                }
                BufferedTokenStream ts = (BufferedTokenStream)_lexerTokenStream;
                var tokens = ts.GetTokens();
                // commontokenstream filters on tokens on the default channel. All other tokens are ignored
                ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, tokens));
            }
            ppStream.Fill();
            _preprocessorTokenStream = ppStream;
        }
        catch (Exception e)
        {
            // Exception during Preprocessing
            parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
            // create empty token stream so we can continue the rest of the code
            _preprocessorTokenStream = new BufferedTokenStream(new XSharpListTokenSource(lexer, new List<IToken>()));
        }
    }
#if DEBUG && DUMP_TIMES
    {
        var ts = DateTime.Now - t;
        t += ts;
        Debug.WriteLine("Preprocessing completed in {0}", ts);
    }
#endif
    parser = new XSharpParser(_preprocessorTokenStream) { Options = _options };
    tree = new XSharpParserRuleContext();
    if (_options.ParseLevel != ParseLevel.Lex)
    {
        // When parsing in Sll mode we do not record any parser errors.
        // When this fails, then we try again with LL mode and then we record errors
        parser.RemoveErrorListeners();
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        // some options to have FAST parsing
        parser.Interpreter.tail_call_preserves_sll = false;
        parser.Interpreter.treat_sllk1_conflict_as_ambiguity = true;
        parser.ErrorHandler = new BailErrorStrategy();
        try
        {
            tree = buildTree(parser);
        }
        catch (ParseCanceledException e)
        {
            // SLL + bail-out failed: retry in LL mode with real error listeners attached
            // so diagnostics get recorded this time.
            if (_options.Verbose)
            {
                string msg = _GetInnerExceptionMessage(e);
                _options.ConsoleOutput.WriteLine("Antlr: SLL parsing failed with failure: " + msg + ". Trying again in LL mode.");
            }
            var errorListener = new XSharpErrorListener(_fileName, parseErrors);
            parser.AddErrorListener(errorListener);
            parser.ErrorHandler = new XSharpErrorStrategy();
            // we need to set force_global_context to get proper error messages. This makes parsing slower
            // but gives better messages
            parser.Interpreter.treat_sllk1_conflict_as_ambiguity = false;
            parser.Interpreter.force_global_context = true;
            parser.Interpreter.enable_global_context_dfa = true;
            parser.Interpreter.PredictionMode = PredictionMode.Ll;
            _preprocessorTokenStream.Reset();
            if (_options.Verbose && pp != null)
            {
                pp.DumpStats();
            }
            if (pp != null)
            {
                pp.Close();
            }
            parser.Reset();
            try
            {
                tree = buildTree(parser);
            }
            catch (Exception e1)
            {
                // Cannot parse again. Must be a syntax error.
                if (_options.Verbose)
                {
                    string msg = _GetInnerExceptionMessage(e1);
                    _options.ConsoleOutput.WriteLine("Antlr: LL parsing also failed with failure: " + msg);
                }
            }
        }
    }// _options.ParseLevel < Complete
#if DEBUG && DUMP_TIMES
    {
        var ts = DateTime.Now - t;
        t += ts;
        Debug.WriteLine("Parsing completed in {0}", ts);
    }
#endif
    if (_options.DumpAST && tree != null)
    {
        // Pretty-print the LISP-style tree dump: mark each run of closing parens
        // after an escaped newline with '*', append a real line break, then strip
        // the markers again. Longer runs must be handled first.
        string strTree = tree.ToStringTree();
        string file = System.IO.Path.ChangeExtension(_fileName, "ast");
        strTree = strTree.Replace(@"\r\n)))))", @"\r\n*)))))" + "\r\n");
        // FIX: the 4-paren case previously replaced with @"\r\n*)))" which dropped
        // one ')' from the dump and unbalanced the output; keep all four.
        strTree = strTree.Replace(@"\r\n))))", @"\r\n*))))" + "\r\n");
        strTree = strTree.Replace(@"\r\n)))", @"\r\n*)))" + "\r\n");
        strTree = strTree.Replace(@"\r\n))", @"\r\n*))" + "\r\n");
        strTree = strTree.Replace(@"\r\n)", @"\r\n*)" + "\r\n");
        strTree = strTree.Replace(@"\r\n*)", @"\r\n)");
        System.IO.File.WriteAllText(file, strTree);
    }
    var walker = new ParseTreeWalker();
    if (_options.ParseLevel == ParseLevel.Complete)
    {
        // check for parser errors, such as missing tokens
        // This adds items to the parseErrors list for missing
        // tokens and missing keywords
        try
        {
            var errchecker = new XSharpParseErrorAnalysis(parser, parseErrors, _options);
            walker.Walk(errchecker, tree);
        }
        catch (Exception e)
        {
            parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
        }
    }
    var treeTransform = CreateTransform(parser, _options, _pool, _syntaxFactory, _fileName);
    bool hasErrors = false;
    SyntaxToken eof = null;
    try
    {
        if (_options.ParseLevel < ParseLevel.Complete || parser.NumberOfSyntaxErrors != 0 ||
            (parseErrors.Count != 0 && parseErrors.Contains(p => !ErrorFacts.IsWarning(p.Code))))
        {
            // Error path: attach all diagnostics as skipped trivia on EOF and skip
            // the tree transform entirely.
            eof = SyntaxFactory.Token(SyntaxKind.EndOfFileToken);
            // FIX: use pp?.IncludedFiles — pp is null for MacroScript (it is only
            // created when !_options.MacroScript) and when lexing threw, so the
            // unconditional pp.IncludedFiles dereference could NRE on this path.
            // Matches the existing `result.IncludedFiles = pp?.IncludedFiles` below.
            eof = AddLeadingSkippedSyntax(eof, ParserErrorsAsTrivia(parseErrors, pp?.IncludedFiles));
            if (tree != null)
            {
                eof.XNode = new XTerminalNodeImpl(tree.Stop);
            }
            else
            {
                eof.XNode = new XTerminalNodeImpl(_lexerTokenStream.Get(_lexerTokenStream.Size - 1));
            }
            hasErrors = true;
        }
        if (!hasErrors)
        {
            try
            {
                walker.Walk(treeTransform, tree);
            }
            catch (Exception e)
            {
                parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
            }
            eof = SyntaxFactory.Token(SyntaxKind.EndOfFileToken);
            if (!parseErrors.IsEmpty())
            {
                // Warnings (and transform errors) still get surfaced on EOF.
                // FIX: pp?.IncludedFiles here too, for the same reason as above.
                eof = AddLeadingSkippedSyntax(eof, ParserErrorsAsTrivia(parseErrors, pp?.IncludedFiles));
            }
        }
        var result = _syntaxFactory.CompilationUnit(
            treeTransform.GlobalEntities.Externs,
            treeTransform.GlobalEntities.Usings,
            treeTransform.GlobalEntities.Attributes,
            treeTransform.GlobalEntities.Members, eof);
        // Cross-link the Roslyn unit with the ANTLR tree and token streams.
        result.XNode = tree;
        tree.CsNode = result;
        result.XTokens = _lexerTokenStream;
        result.XPPTokens = _preprocessorTokenStream;
        result.HasDocComments = lexer.HasDocComments;
        if (!_options.MacroScript && !hasErrors)
        {
            result.InitProcedures = treeTransform.GlobalEntities.InitProcedures;
            result.Globals = treeTransform.GlobalEntities.Globals;
            result.PragmaWarnings = treeTransform.GlobalEntities.PragmaWarnings;
            result.PragmaOptions = treeTransform.GlobalEntities.PragmaOptions;
            result.IncludedFiles = pp?.IncludedFiles;
            result.FileWidePublics = treeTransform.GlobalEntities.FileWidePublics;
            result.HasPCall = treeTransform.GlobalEntities.HasPCall;
            result.NeedsProcessing = treeTransform.GlobalEntities.NeedsProcessing;
            if (_options.HasRuntime)
            {
                result.LiteralSymbols = ((XSharpTreeTransformationRT)treeTransform).LiteralSymbols;
                result.LiteralPSZs = ((XSharpTreeTransformationRT)treeTransform).LiteralPSZs;
            }
        }
        return (result);
    }
    finally
    {
#if DEBUG && DUMP_TIMES
        {
            var ts = DateTime.Now - t;
            t += ts;
            Debug.WriteLine("Tree transform completed in {0}", ts);
        }
#endif
        treeTransform.Free();
        // NOTE(review): pp.Close() may run a second time after the LL fallback path
        // already closed it — assumed idempotent; confirm against XSharpPreprocessor.
        if (pp != null)
        {
            pp.Close();
        }
    }
}