Example 1
        public static bool Lex(string sourceText, string fileName, CSharpParseOptions options, IErrorListener listener,
                               out ITokenStream tokens)
        {
            tokens = null;
            var parseErrors = ParseErrorData.NewBag();

            try
            {
                var lexer = XSharpLexer.Create(sourceText, fileName, options);
                lexer.Options = options;
                var tokenStream = lexer.GetTokenStream();
                tokenStream.Fill();
                tokens = tokenStream;
                GetLexerErrors(lexer, tokenStream, parseErrors);
                #region Determine if we need to preprocess
                bool mustPreprocess = true;
                if (options.NoStdDef)
                {
                    mustPreprocess = lexer.MustBeProcessed || lexer.HasPreprocessorTokens;
                }

                #endregion
                BufferedTokenStream ppStream = null;
                var pp = new XSharpPreprocessor(lexer, tokenStream, options, fileName, Encoding.Unicode, SourceHashAlgorithm.None, parseErrors);

                if (mustPreprocess)
                {
                    var ppTokens = pp.PreProcess();
                    ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, ppTokens));
                }
                else
                {
                    // No Standard Defs and no preprocessor tokens in the lexer
                    // so we bypass the preprocessor and use the lexer token stream
                    ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, tokenStream.GetTokens()));
                }
                ppStream.Fill();
            }
            catch (Exception)
            {
                // swallow any exception here; the errors collected so far are still reported below
            }
            ReportErrors(parseErrors, listener);
            return tokens != null;
        }
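
For reference, a minimal sketch of how this Lex helper might be called. It is not part of the source: ConsoleErrorListener stands in for whatever IErrorListener implementation you use, and CSharpParseOptions.Default is assumed to supply usable default options.

        // Hypothetical caller: tokenize one file and report how many tokens were produced.
        var path   = @"C:\work\example.prg";                    // hypothetical file path
        var source = System.IO.File.ReadAllText(path);
        IErrorListener listener = new ConsoleErrorListener();   // assumed IErrorListener implementation
        if (Lex(source, path, CSharpParseOptions.Default, listener, out ITokenStream tokens))
        {
            Console.WriteLine("Lexed {0} tokens", tokens.Size);
        }
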
Example 2
        private static void Parse(string fileName)
        {
            ITokenStream stream;
            IList<ParseErrorData> parseErrors = ParseErrorData.NewBag();
            var filestream = new AntlrFileStream(fileName);
            var lexer      = new XSharpLexer(filestream);

            lexer.TokenFactory = XSharpTokenFactory.Default;
            stream             = new CommonTokenStream(lexer, Lexer.DefaultTokenChannel);
            var parser = new XSharpParser(stream);

            parser.IsScript = false;
            parser.AllowFunctionInsideClass = false;
            parser.AllowNamedArgs           = false;
            parser.AllowXBaseVariables      = false;
            parser.RemoveErrorListeners();

            parser.Interpreter.PredictionMode    = PredictionMode.Sll;
            parser.Interpreter.reportAmbiguities = true;

            parser.Interpreter.enable_global_context_dfa = true; // default false
            parser.Interpreter.optimize_tail_calls       = true;
            parser.Interpreter.tail_call_preserves_sll   = true;
            //parser.Interpreter.userWantsCtxSensitive = true; // default true

            parser.ErrorHandler = new XSharpErrorStrategy();
            parser.AddErrorListener(new XSharpErrorListener(fileName, parseErrors, true));
            XSharpParserRuleContext tree;

            try
            {
                tree = parser.source();
            }
            catch (ParseCanceledException)
            {
                Console.WriteLine("Parse error, Errors from SLL mode");
                showErrors(parseErrors);
                parseErrors.Clear();
                parser.ErrorHandler = new XSharpErrorStrategy();
                parser.AddErrorListener(new XSharpErrorListener(fileName, parseErrors, true));
                parser.Interpreter.PredictionMode       = PredictionMode.Ll;
                parser.Interpreter.force_global_context = true;
                parser.Interpreter.optimize_ll1         = false;
                parser.Interpreter.reportAmbiguities    = true;
                parser.Reset();
                try
                {
                    tree = parser.source();
                }
                catch (Exception e)
                {
                    tree = null;
                    Console.WriteLine(e.Message);
                }
            }
            // collect the lexer errors, then walk the tree below to find parser errors (missing tokens etc.)

            foreach (var e in lexer.LexErrors)
            {
                parseErrors.Add(e);
            }
            var walker     = new ParseTreeWalker();
            var errchecker = new XSharpParseErrorAnalysis(parser, parseErrors);

            if (tree != null)
            {
                walker.Walk(errchecker, tree);
            }
            Console.WriteLine("Parse error, Errors:");
            showErrors(parseErrors);
        }
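
The example above shows ANTLR's two-stage strategy: try the fast SLL prediction mode first, and fall back to full LL prediction, which is slower but produces reliable diagnostics, only when the first pass is cancelled. Below is a condensed sketch of that pattern distilled from the example; the helper itself is not part of the source and only reuses calls that appear above.

        private static XSharpParserRuleContext ParseWithFallback(XSharpParser parser, string fileName,
                                                                 IList<ParseErrorData> parseErrors)
        {
            // First pass: fast SLL prediction.
            parser.ErrorHandler = new XSharpErrorStrategy();
            parser.AddErrorListener(new XSharpErrorListener(fileName, parseErrors, true));
            parser.Interpreter.PredictionMode = PredictionMode.Sll;
            try
            {
                return parser.source();
            }
            catch (ParseCanceledException)
            {
                // SLL gave up: discard the errors from the cheap pass and retry with full LL context.
                parseErrors.Clear();
                parser.Interpreter.PredictionMode       = PredictionMode.Ll;
                parser.Interpreter.force_global_context = true;
                parser.Reset();
                return parser.source();
            }
        }
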
Example 3
        internal CompilationUnitSyntax ParseCompilationUnitCore()
        {
#if DEBUG && DUMP_TIMES
            DateTime t = DateTime.Now;
#endif
            if (_options.ShowIncludes)
            {
                _options.ConsoleOutput.WriteLine("Compiling {0}", _fileName);
            }
            var                     sourceText  = _text.ToString();
            XSharpLexer             lexer       = null;
            XSharpPreprocessor      pp          = null;
            XSharpParserRuleContext tree        = new XSharpParserRuleContext();
            XSharpParser            parser      = null;
            var                     parseErrors = ParseErrorData.NewBag();
            try
            {
                lexer             = XSharpLexer.Create(sourceText, _fileName, _options);
                lexer.Options     = _options;
                _lexerTokenStream = lexer.GetTokenStream();
            }
            catch (Exception e)
            {
                // Exception during Lexing
                parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
                // create empty token stream so we can continue the rest of the code
                _lexerTokenStream = new BufferedTokenStream(new XSharpListTokenSource(lexer, new List<IToken>()));
            }
#if DEBUG && DUMP_TIMES
            {
                var ts = DateTime.Now - t;
                t += ts;
                Debug.WriteLine("Lexing completed in {0}", ts);
            }
#endif
            // do not pre-process when there were lexer exceptions
            if (lexer != null && parseErrors.Count == 0)
            {
                foreach (var e in lexer.LexErrors)
                {
                    parseErrors.Add(e);
                }
                BufferedTokenStream ppStream = null;
                try
                {
                    // Check for #pragma in the lexerTokenStream
                    _lexerTokenStream.Fill();

                    if (!_options.MacroScript)
                    {
                        pp = new XSharpPreprocessor(lexer, _lexerTokenStream, _options, _fileName, _text.Encoding, _text.ChecksumAlgorithm, parseErrors);
                    }
                    var mustPreprocess = !_options.MacroScript && (lexer.HasPreprocessorTokens || !_options.NoStdDef);
                    if (mustPreprocess)
                    {
                        var ppTokens = pp.PreProcess();
                        ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, ppTokens));
                    }
                    else
                    {
                        // No Standard Defs and no preprocessor tokens in the lexer
                        // so we bypass the preprocessor and use the lexer tokenstream
                        // but if a .ppo is required we must use the preprocessor to
                        // write the source text to the .ppo file
                        if (_options.PreprocessorOutput && pp != null)
                        {
                            pp.writeToPPO(sourceText, false);
                        }
                        BufferedTokenStream ts = (BufferedTokenStream)_lexerTokenStream;
                        var tokens             = ts.GetTokens();
                        // commontokenstream filters on tokens on the default channel. All other tokens are ignored
                        ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, tokens));
                    }
                    ppStream.Fill();
                    _preprocessorTokenStream = ppStream;
                }
                catch (Exception e)
                {
                    // Exception during Preprocessing
                    parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
                    // create empty token stream so we can continue the rest of the code
                    _preprocessorTokenStream = new BufferedTokenStream(new XSharpListTokenSource(lexer, new List<IToken>()));
                }
            }
#if DEBUG && DUMP_TIMES
            {
                var ts = DateTime.Now - t;
                t += ts;
                Debug.WriteLine("Preprocessing completed in {0}", ts);
            }
#endif
            parser = new XSharpParser(_preprocessorTokenStream)
            {
                Options = _options
            };

            tree = new XSharpParserRuleContext();
            if (_options.ParseLevel != ParseLevel.Lex)
            {
                // When parsing in Sll mode we do not record any parser errors.
                // When this fails, then we try again with LL mode and then we record errors
                parser.RemoveErrorListeners();
                parser.Interpreter.PredictionMode = PredictionMode.Sll;
                // some options to have FAST parsing
                parser.Interpreter.tail_call_preserves_sll           = false;
                parser.Interpreter.treat_sllk1_conflict_as_ambiguity = true;
                parser.ErrorHandler = new BailErrorStrategy();
                try
                {
                    tree = buildTree(parser);
                }
                catch (ParseCanceledException e)
                {
                    if (_options.Verbose)
                    {
                        string msg = _GetInnerExceptionMessage(e);
                        _options.ConsoleOutput.WriteLine("Antlr: SLL parsing failed with failure: " + msg + ". Trying again in LL mode.");
                    }
                    var errorListener = new XSharpErrorListener(_fileName, parseErrors);
                    parser.AddErrorListener(errorListener);
                    parser.ErrorHandler = new XSharpErrorStrategy();
                    // we need to set force_global_context to get proper error messages. This makes parsing slower
                    // but gives better messages
                    parser.Interpreter.treat_sllk1_conflict_as_ambiguity = false;
                    parser.Interpreter.force_global_context      = true;
                    parser.Interpreter.enable_global_context_dfa = true;
                    parser.Interpreter.PredictionMode            = PredictionMode.Ll;
                    _preprocessorTokenStream.Reset();
                    if (_options.Verbose && pp != null)
                    {
                        pp.DumpStats();
                    }
                    if (pp != null)
                    {
                        pp.Close();
                    }
                    parser.Reset();
                    try
                    {
                        tree = buildTree(parser);
                    }
                    catch (Exception e1)
                    {
                        // Cannot parse again. Must be a syntax error.
                        if (_options.Verbose)
                        {
                            string msg = _GetInnerExceptionMessage(e1);
                            _options.ConsoleOutput.WriteLine("Antlr: LL parsing also failed with failure: " + msg);
                        }
                    }
                }
            }   // _options.ParseLevel != ParseLevel.Lex
#if DEBUG && DUMP_TIMES
            {
                var ts = DateTime.Now - t;
                t += ts;
                Debug.WriteLine("Parsing completed in {0}", ts);
            }
#endif
            if (_options.DumpAST && tree != null)
            {
                string strTree = tree.ToStringTree();
                string file    = System.IO.Path.ChangeExtension(_fileName, "ast");
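                // break the single-line tree dump into lines at the closing parens so the .ast file stays readable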
                strTree = strTree.Replace(@"\r\n)))))", @"\r\n*)))))" + "\r\n");
                strTree = strTree.Replace(@"\r\n))))", @"\r\n*)))" + "\r\n");
                strTree = strTree.Replace(@"\r\n)))", @"\r\n*)))" + "\r\n");
                strTree = strTree.Replace(@"\r\n))", @"\r\n*))" + "\r\n");
                strTree = strTree.Replace(@"\r\n)", @"\r\n*)" + "\r\n");
                strTree = strTree.Replace(@"\r\n*)", @"\r\n)");
                System.IO.File.WriteAllText(file, strTree);
            }
            var walker = new ParseTreeWalker();


            if (_options.ParseLevel == ParseLevel.Complete)
            {
                // check for parser errors, such as missing tokens
                // This adds items to the parseErrors list for missing
                // tokens and missing keywords
                try
                {
                    var errchecker = new XSharpParseErrorAnalysis(parser, parseErrors, _options);
                    walker.Walk(errchecker, tree);
                }
                catch (Exception e)
                {
                    parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
                }
            }
            var         treeTransform = CreateTransform(parser, _options, _pool, _syntaxFactory, _fileName);
            bool        hasErrors     = false;
            SyntaxToken eof           = null;
            try
            {
                if (_options.ParseLevel < ParseLevel.Complete || parser.NumberOfSyntaxErrors != 0 ||
                    (parseErrors.Count != 0 && parseErrors.Contains(p => !ErrorFacts.IsWarning(p.Code))))
                {
                    eof = SyntaxFactory.Token(SyntaxKind.EndOfFileToken);
                    eof = AddLeadingSkippedSyntax(eof, ParserErrorsAsTrivia(parseErrors, pp.IncludedFiles));
                    if (tree != null)
                    {
                        eof.XNode = new XTerminalNodeImpl(tree.Stop);
                    }
                    else
                    {
                        eof.XNode = new XTerminalNodeImpl(_lexerTokenStream.Get(_lexerTokenStream.Size - 1));
                    }
                    hasErrors = true;
                }

                if (!hasErrors)
                {
                    try
                    {
                        walker.Walk(treeTransform, tree);
                    }
                    catch (Exception e)
                    {
                        parseErrors.Add(new ParseErrorData(_fileName, ErrorCode.ERR_Internal, e.Message, e.StackTrace));
                    }
                    eof = SyntaxFactory.Token(SyntaxKind.EndOfFileToken);
                    if (!parseErrors.IsEmpty())
                    {
                        eof = AddLeadingSkippedSyntax(eof, ParserErrorsAsTrivia(parseErrors, pp.IncludedFiles));
                    }
                }
                var result = _syntaxFactory.CompilationUnit(
                    treeTransform.GlobalEntities.Externs,
                    treeTransform.GlobalEntities.Usings,
                    treeTransform.GlobalEntities.Attributes,
                    treeTransform.GlobalEntities.Members, eof);
                result.XNode          = tree;
                tree.CsNode           = result;
                result.XTokens        = _lexerTokenStream;
                result.XPPTokens      = _preprocessorTokenStream;
                result.HasDocComments = lexer.HasDocComments;
                if (!_options.MacroScript && !hasErrors)
                {
                    result.InitProcedures  = treeTransform.GlobalEntities.InitProcedures;
                    result.Globals         = treeTransform.GlobalEntities.Globals;
                    result.PragmaWarnings  = treeTransform.GlobalEntities.PragmaWarnings;
                    result.PragmaOptions   = treeTransform.GlobalEntities.PragmaOptions;
                    result.IncludedFiles   = pp?.IncludedFiles;
                    result.FileWidePublics = treeTransform.GlobalEntities.FileWidePublics;
                    result.HasPCall        = treeTransform.GlobalEntities.HasPCall;
                    result.NeedsProcessing = treeTransform.GlobalEntities.NeedsProcessing;
                    if (_options.HasRuntime)
                    {
                        result.LiteralSymbols = ((XSharpTreeTransformationRT)treeTransform).LiteralSymbols;
                        result.LiteralPSZs    = ((XSharpTreeTransformationRT)treeTransform).LiteralPSZs;
                    }
                }
                return result;
            }
            finally
            {
#if DEBUG && DUMP_TIMES
                {
                    var ts = DateTime.Now - t;
                    t += ts;
                    Debug.WriteLine("Tree transform completed in {0}", ts);
                }
#endif
                treeTransform.Free();
                if (pp != null)
                {
                    pp.Close();
                }
            }
        }
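
A side note on Example 3: the elapsed-time measurement inside the #if DEBUG && DUMP_TIMES blocks is repeated after every phase. A small hypothetical helper, not part of the source, could express it once:

#if DEBUG && DUMP_TIMES
        // Hypothetical helper: log the time spent in one phase and advance the running timestamp,
        // mirroring the inline timing blocks in the example above.
        private static void DumpPhaseTime(ref DateTime t, string phase)
        {
            var ts = DateTime.Now - t;
            t += ts;
            Debug.WriteLine("{0} completed in {1}", phase, ts);
        }
#endif

Each inline block would then reduce to a single call such as DumpPhaseTime(ref t, "Lexing").
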
Example 4
        public static bool Parse(string sourceText, string fileName, CSharpParseOptions options, IErrorListener listener,
                                 out ITokenStream tokens, out XSharpParser.SourceContext tree)
        {
            tree   = null;
            tokens = null;
            var parseErrors = ParseErrorData.NewBag();

            try
            {
                var lexer = XSharpLexer.Create(sourceText, fileName, options);
                lexer.Options = options;
                BufferedTokenStream tokenStream = lexer.GetTokenStream();
                tokenStream.Fill();
                tokens = tokenStream;

                GetLexerErrors(lexer, tokenStream, parseErrors);

                #region Determine if we really need the preprocessor
                bool mustPreprocess = true;
                if (lexer.HasPreprocessorTokens || !options.NoStdDef)
                {
                    // no need to pre process in partial compilation
                    // if lexer does not contain UDCs, Messages or Includes
                    mustPreprocess = lexer.MustBeProcessed;
                }
                else
                {
                    mustPreprocess = false;
                }
                #endregion
                BufferedTokenStream ppStream = null;
                var pp = new XSharpPreprocessor(lexer, tokenStream, options, fileName, Encoding.Unicode, SourceHashAlgorithm.None, parseErrors);

                if (mustPreprocess)
                {
                    var ppTokens = pp.PreProcess();
                    ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, ppTokens));
                }
                else
                {
                    // The preprocessor is not needed (no standard defs to include and nothing to expand),
                    // so we bypass it and use the lexer token stream directly
                    ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, tokenStream.GetTokens()));
                }
                ppStream.Fill();
                var parser = new XSharpParser(ppStream);
                parser.Interpreter.tail_call_preserves_sll = false;     // default = true; setting it to false reduces the memory used by the parser
                parser.Options = options;
                tree           = null;
                parser.RemoveErrorListeners();
                parser.Interpreter.PredictionMode = PredictionMode.Sll;
                parser.ErrorHandler = new BailErrorStrategy();
                try
                {
                    tree = parser.source();
                }
                catch (Exception)
                {
                    var errorListener = new XSharpErrorListener(fileName, parseErrors);
                    parser.AddErrorListener(errorListener);
                    parser.ErrorHandler = new XSharpErrorStrategy();
                    parser.Interpreter.PredictionMode = PredictionMode.Ll;
                    ppStream.Reset();
                    parser.Reset();
                    try
                    {
                        tree = parser.source();
                    }
                    catch (Exception)
                    {
                        tree = null;
                    }
                }
            }
            catch (Exception)
            {
                tree = null;
            }
            ReportErrors(parseErrors, listener);
            return tree != null;
        }
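
This overload is called like the Lex sketch after Example 1, with an extra out parameter that receives the parse tree. The caller below is hypothetical and reuses the same assumed ConsoleErrorListener and default options:

        var path   = @"C:\work\example.prg";                    // hypothetical file path
        var source = System.IO.File.ReadAllText(path);
        if (Parse(source, path, CSharpParseOptions.Default, new ConsoleErrorListener(),
                  out ITokenStream tokens, out XSharpParser.SourceContext tree))
        {
            // tokens holds the lexer token stream; tree is the ANTLR parse tree for the whole file
            Console.WriteLine("Parsed {0} top-level child nodes", tree.ChildCount);
        }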