Exemple #1
0
        /// <summary>
        /// For repeated use of a tree pattern, compile it to a
        /// <see cref="ParseTreePattern"/>
        /// using this method.
        /// </summary>
        public virtual ParseTreePattern Compile(string pattern, int patternRuleIndex)
        {
            // Tokenize the textual pattern and drive a parser interpreter over
            // it, using the bypass-alternatives ATN so tag tokens can stand in
            // for whole rules.
            var patternTokens = Tokenize(pattern);
            var tokenSource   = new ListTokenSource(patternTokens);
            var tokenStream   = new CommonTokenStream(tokenSource);
            var interpreter   = new ParserInterpreter(parser.GrammarFileName, parser.Vocabulary, Arrays.AsList(parser.RuleNames), parser.GetATNWithBypassAlts(), tokenStream);

            IParseTree patternTree = null;
            try
            {
                // Fail on the first syntax error instead of attempting recovery.
                interpreter.ErrorHandler = new BailErrorStrategy();
                patternTree = interpreter.Parse(patternRuleIndex);
            }
            catch (ParseCanceledException e)
            {
                // BailErrorStrategy wraps the original RecognitionException;
                // unwrap and rethrow it.
                throw (RecognitionException)e.InnerException;
            }
            catch (RecognitionException)
            {
                throw;
            }
            catch (Exception e)
            {
                throw new ParseTreePatternMatcher.CannotInvokeStartRule(e);
            }

            // The start rule must consume the entire pattern; leftover tokens
            // mean the pattern text did not match the rule.
            if (tokenStream.LA(1) != TokenConstants.EOF)
            {
                throw new ParseTreePatternMatcher.StartRuleDoesNotConsumeFullPattern();
            }

            return new ParseTreePattern(this, pattern, patternRuleIndex, patternTree);
        }
Exemple #2
0
    /// <summary>
    /// Entry point: lexes (and optionally parses) an input file, printing
    /// stage timings, the token dump, and the parse tree.
    /// Args: [0] file name; [1] root rule name; [2] "true" to skip parsing.
    /// </summary>
    static void Main(string[] args)
    {
        try
        {
            // Default input used when no file name is supplied.
            string fileName = "../../../../Text";

            if (args.Length > 0)
            {
                fileName = args[0];
            }

            var code       = File.ReadAllText(fileName);
            var codeStream = new AntlrInputStream(code);
            var lexer      = new __TemplateGrammarName__Lexer(codeStream);
            lexer.RemoveErrorListeners();
            lexer.AddErrorListener(new LexerErrorListener());

            var stopwatch = Stopwatch.StartNew();
            var tokens    = lexer.GetAllTokens();
            stopwatch.Stop();
            Console.WriteLine("LexerTime {0}", stopwatch.Elapsed);
            Console.WriteLine("Tokens {0}", tokens.TokensToString());

/*$ParserPart*/
            string rootRule = null;
            bool   notParse = false;

            if (args.Length > 1)
            {
                rootRule = args[1];
                if (args.Length > 2)
                {
                    // Unparsable values are ignored; notParse stays false.
                    bool.TryParse(args[2], out notParse);
                }
            }

            if (!notParse)
            {
                var tokensSource = new ListTokenSource(tokens);
                var tokensStream = new CommonTokenStream(tokensSource);
                var parser       = new __TemplateGrammarName__Parser(tokensStream);
                parser.RemoveErrorListeners();
                parser.AddErrorListener(new ParserErrorListener());

                stopwatch.Restart();
                // Fall back to the grammar's first rule when no root rule is given.
                string ruleName   = rootRule ?? __TemplateGrammarName__Parser.ruleNames[0];
                // Rule methods are generated per rule, so resolve and invoke by name.
                var    rootMethod = typeof(__TemplateGrammarName__Parser).GetMethod(ruleName);
                var    ast        = (ParserRuleContext)rootMethod.Invoke(parser, new object[0]);
                stopwatch.Stop();

                Console.WriteLine("ParserTime {0}", stopwatch.Elapsed);
                Console.WriteLine("Tree {0}", ast.ToStringTree(parser));
            }
/*ParserPart$*/
        }
        catch (Exception ex)
        {
            // Collapse to a single line so downstream tools can consume it.
            Console.Error.WriteLine(ex.ToString().Replace("\r", "").Replace("\n", ""));
        }
    }
Exemple #3
0
        /// <summary>
        /// Two-stage C# parse: first evaluate preprocessor directives
        /// (tokens following '#') with CSharpPreprocessorParser to decide
        /// which token runs are active, then parse the surviving code tokens
        /// with CSharpParser.
        /// </summary>
        private CSharpParser.Compilation_unitContext ParseFile(string file)
        {
            // Tokens that survive preprocessing and reach the real parser.
            IList <IToken> codeTokens = new List <IToken>();

            string sourceCode = File.ReadAllText(file);

            Lexer                    preprocessorLexer    = new CSharpLexer(new AntlrInputStream(sourceCode));
            IList <IToken>           tokens               = preprocessorLexer.GetAllTokens();
            IList <IToken>           directiveTokens      = new List <IToken>();
            ListTokenSource          directiveTokenSource = new ListTokenSource(directiveTokens);
            CommonTokenStream        directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
            CSharpPreprocessorParser preprocessorParser   = new CSharpPreprocessorParser(directiveTokenStream);

            int  index           = 0;
            // True while the current token region is enabled by the directives.
            bool compiliedTokens = true;

            while (index < tokens.Count)
            {
                IToken token = tokens[index];
                if (token.Type == CSharpLexer.SHARP)
                {
                    // Collect this directive's tokens up to its newline, EOF,
                    // or the next '#', skipping hidden/comment channels.
                    directiveTokens.Clear();
                    int directiveTokenIndex = index + 1;
                    while (directiveTokenIndex < tokens.Count &&
                           tokens[directiveTokenIndex].Type != CSharpLexer.Eof &&
                           tokens[directiveTokenIndex].Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                           tokens[directiveTokenIndex].Type != CSharpLexer.SHARP)
                    {
                        if (tokens[directiveTokenIndex].Channel != Lexer.Hidden &&
                            tokens[directiveTokenIndex].Channel != CSharpLexer.COMMENTS_CHANNEL)
                        {
                            directiveTokens.Add(tokens[directiveTokenIndex]);
                        }
                        directiveTokenIndex++;
                    }
                    // Re-point the shared preprocessor parser at the new
                    // directive tokens.
                    // NOTE(review): similar code elsewhere also calls
                    // preprocessorParser.Reset() after swapping TokenStream —
                    // confirm whether that is needed here.
                    directiveTokenSource           = new ListTokenSource(directiveTokens);
                    directiveTokenStream           = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
                    preprocessorParser.TokenStream = directiveTokenStream;

                    CSharpPreprocessorParser.Preprocessor_directiveContext directive =
                        preprocessorParser.preprocessor_directive();
                    // if true then the following code is valid and not ignored.
                    compiliedTokens = directive.value;
                    // Resume the outer scan just before the directive terminator.
                    index           = directiveTokenIndex - 1;
                }
                else if (token.Channel != Lexer.Hidden && token.Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                         token.Channel != CSharpLexer.COMMENTS_CHANNEL &&
                         compiliedTokens)
                {
                    codeTokens.Add(token); // Collect code tokens.
                }
                index++;
            }

            // Second stage: parse the filtered tokens in the usual way.
            ListTokenSource   codeTokenSource = new ListTokenSource(codeTokens);
            CommonTokenStream codeTokenStream = new CommonTokenStream(codeTokenSource);
            CSharpParser      parser          = new CSharpParser(codeTokenStream);

            return(parser.compilation_unit());
        }
Exemple #4
0
        /// <summary>
        /// Parses an already-lexed token list into a parse tree, collecting
        /// comment-channel tokens along the way and measuring parse time.
        /// </summary>
        /// <param name="tokens">Tokens produced by a prior lexing stage.</param>
        /// <param name="parserTimeSpan">Elapsed parse time; TimeSpan.Zero when parsing failed.</param>
        /// <returns>The wrapped parse tree, or null when parsing threw.</returns>
        public ParseTree Parse(IList<IToken> tokens, out TimeSpan parserTimeSpan)
        {
            if (SourceFile == null)
            {
                throw new ArgumentNullException(nameof(SourceFile));
            }

            if (ErrorListener == null)
            {
                ErrorListener            = new AntlrMemoryErrorListener();
                ErrorListener.Logger     = Logger;
                ErrorListener.LineOffset = LineOffset;
            }

            ErrorListener.SourceFile = SourceFile;

            // Fix: the out parameter must be definitely assigned on every exit
            // path (CS0177) — the catch path below swallows the exception and
            // returns without reaching the assignment inside the try block.
            parserTimeSpan = TimeSpan.Zero;

            AntlrParseTree result = null;

            try
            {
                // Split comment-channel tokens aside; exposed via result.Comments.
                var commentTokens = new List<IToken>();

                foreach (IToken token in tokens)
                {
                    if (token.Channel == CommentsChannel)
                    {
                        commentTokens.Add(token);
                    }
                }

                // Time only the parsing stage.
                var stopwatch                = Stopwatch.StartNew();
                var codeTokenSource          = new ListTokenSource(tokens);
                var codeTokenStream          = new CommonTokenStream(codeTokenSource);
                ParserRuleContext syntaxTree = ParseTokens(ErrorListener, codeTokenStream);
                stopwatch.Stop();
                parserTimeSpan = stopwatch.Elapsed;

                result = Create(syntaxTree);

                result.Tokens     = tokens;
                result.Comments   = commentTokens;
                result.SourceFile = SourceFile;
            }
            catch (Exception ex) when (!(ex is ThreadAbortException))
            {
                // Log and return null; thread aborts propagate unfiltered.
                Logger.LogError(new ParsingException(SourceFile, ex));
            }
            finally
            {
                HandleMemoryConsumption();
            }

            return result;
        }
Exemple #5
0
        public ITokenSource Edit(ITokenSource tokens)
        {
            // Buffer the whole input and parse it as an HTML document.
            var stream = new CommonTokenStream(tokens);
            var parser = new HtmlParser(stream);
            stream.Fill();

            var document = parser.htmlDocument();

            // The visitor rewrites the token list based on the parse tree.
            visitor.Visit(document, stream.GetTokens());

            var edited = new ListTokenSource(visitor.GetResult())
            {
                TokenFactory = tokens.TokenFactory
            };
            return edited;
        }
        public virtual ITokenSource Edit(ITokenSource tokens)
        {
            // Buffer the whole input and parse it as a JS program.
            var stream = new CommonTokenStream(tokens);
            var parser = new JsParser(stream);
            stream.Fill();

            var program = parser.program();

            // The visitor rewrites the token list based on the parse tree.
            visitor.Visit(program, stream.GetTokens());

            var edited = new ListTokenSource(visitor.GetResult())
            {
                TokenFactory = tokens.TokenFactory
            };
            return edited;
        }
Exemple #7
0
        public ITokenSource Edit(ITokenSource tokens)
        {
            // Buffer the whole input and parse it as a JS program.
            var stream = new CommonTokenStream(tokens);
            var parser = new JsParser(stream);
            stream.Fill();

            // Visitor that rewrites string literals, configured with the
            // factory names and the caller's token factory.
            IVisitorTree literalRewriter = new JsVisitorChangeLiteralString(factoryNames, tokens.TokenFactory);

            var program = parser.program();

            literalRewriter.Visit(program, stream.GetTokens());

            var edited = new ListTokenSource(literalRewriter.GetResult())
            {
                TokenFactory = tokens.TokenFactory
            };
            return edited;
        }
        private void ParseCSharp(string input)
        {
            // Stage 1: collect all tokens with the lexer (CSharpLexer.g4).
            // TODO: handle preprocessor tokens.
            Lexer lexer  = new CSharpLexer(new AntlrInputStream(input));
            var   tokens = lexer.GetAllTokens();

            // Stage 2: parse the token stream (CSharpParser.g4).
            var          tokenSource = new ListTokenSource(tokens);
            var          tokenStream = new CommonTokenStream(tokenSource);
            CSharpParser parser      = new CSharpParser(tokenStream);

            var tree = parser.compilation_unit();

            if (tree != null)
            {
                // Feed the resulting tree into the model builder.
                var builder = new TreeBuilder(parser, tree, treeModel);
                builder.Build();
            }
        }
Exemple #9
0
        /// <summary>
        /// Full lex-and-parse pipeline for one file: preprocess the text,
        /// lex it, split off comment-channel tokens, parse, and wrap the
        /// result with per-stage timings. Returns Create(null) when the file
        /// has no code or when lexing/parsing throws.
        /// </summary>
        public ParseTree Parse(CodeFile sourceCodeFile)
        {
            AntlrParseTree result = null;

            var filePath = sourceCodeFile.RelativeName;

            if (sourceCodeFile.Code != null)
            {
                // Listener accumulates errors in memory and forwards to Logger.
                var errorListener = new AntlrMemoryErrorListener();
                errorListener.CodeFile   = sourceCodeFile;
                errorListener.Logger     = Logger;
                errorListener.LineOffset = LineOffset;
                try
                {
                    var preprocessedText = PreprocessText(sourceCodeFile);
                    AntlrInputStream inputStream;
                    if (Language.IsCaseInsensitive)
                    {
                        // Case-insensitive languages get a normalizing stream.
                        inputStream = new AntlrCaseInsensitiveInputStream(preprocessedText, CaseInsensitiveType);
                    }
                    else
                    {
                        inputStream = new AntlrInputStream(preprocessedText);
                    }
                    inputStream.name = filePath;

                    Lexer lexer = InitLexer(inputStream);
                    // Fresh ATN simulator; GetOrCreateAtn(true) supplies the lexer ATN.
                    lexer.Interpreter = new LexerATNSimulator(lexer, GetOrCreateAtn(true));
                    lexer.RemoveErrorListeners();
                    lexer.AddErrorListener(errorListener);
                    var commentTokens = new List <IToken>();

                    // Time the lexing stage in raw ticks.
                    var            stopwatch = Stopwatch.StartNew();
                    IList <IToken> tokens    = lexer.GetAllTokens();
                    stopwatch.Stop();
                    long lexerTimeSpanTicks = stopwatch.ElapsedTicks;

                    // Pull comment-channel tokens aside; exposed via result.Comments.
                    foreach (IToken token in tokens)
                    {
                        if (token.Channel == CommentsChannel)
                        {
                            commentTokens.Add(token);
                        }
                    }

                    // Time the parsing stage.
                    stopwatch.Restart();
                    var codeTokenSource          = new ListTokenSource(tokens);
                    var codeTokenStream          = new CommonTokenStream(codeTokenSource);
                    ParserRuleContext syntaxTree = ParseTokens(sourceCodeFile, errorListener, codeTokenStream);
                    stopwatch.Stop();
                    long parserTimeSpanTicks = stopwatch.ElapsedTicks;

                    result = Create(syntaxTree);
                    result.LexerTimeSpan  = new TimeSpan(lexerTimeSpanTicks);
                    result.ParserTimeSpan = new TimeSpan(parserTimeSpanTicks);
                    result.Tokens         = tokens;
                    result.Comments       = commentTokens;
                }
                catch (Exception ex) when(!(ex is ThreadAbortException))
                {
                    // Log and fall through with an empty tree; thread aborts propagate.
                    Logger.LogError(new ParsingException(sourceCodeFile, ex));

                    if (result == null)
                    {
                        result = Create(null);
                    }
                }
                finally
                {
                    // Statistics are updated whether or not parsing succeeded.
                    Interlocked.Increment(ref processedFilesCount);
                    Interlocked.Add(ref processedBytesCount, sourceCodeFile.Code.Length);
                }
            }
            else
            {
                result = Create(null);
            }
            result.SourceCodeFile = sourceCodeFile;

            return(result);
        }
Exemple #10
0
        /// <summary>
        /// Lex-and-parse pipeline for one source file: preprocess the text,
        /// lex, split off comment-channel tokens, parse, and package the
        /// result with per-stage timings. Returns Create(null) when the file
        /// has no code or an error occurs.
        /// </summary>
        protected virtual ParseTree TokenizeAndParse(SourceCodeFile sourceCodeFile)
        {
            AntlrParseTree result = null;

            var filePath = Path.Combine(sourceCodeFile.RelativePath, sourceCodeFile.Name);

            if (sourceCodeFile.Code != null)
            {
                // Listener accumulates errors in memory and forwards to Logger.
                var errorListener = new AntlrMemoryErrorListener();
                errorListener.FileName   = filePath;
                errorListener.FileData   = sourceCodeFile.Code;
                errorListener.Logger     = Logger;
                errorListener.LineOffset = sourceCodeFile.LineOffset;
                try
                {
                    var preprocessedText = PreprocessText(sourceCodeFile);
                    AntlrInputStream inputStream;
                    if (Language.IsCaseInsensitive())
                    {
                        // Case-insensitive languages get a normalizing stream.
                        inputStream = new AntlrCaseInsensitiveInputStream(preprocessedText, CaseInsensitiveType);
                    }
                    else
                    {
                        inputStream = new AntlrInputStream(preprocessedText);
                    }
                    inputStream.name = filePath;

                    Lexer lexer = InitLexer(inputStream);
                    // Keep a reference to the active lexer on the instance.
                    Lexer = lexer;
                    lexer.RemoveErrorListeners();
                    lexer.AddErrorListener(errorListener);
                    var commentTokens = new List <IToken>();

                    // Time the lexing stage in raw ticks.
                    var            stopwatch = Stopwatch.StartNew();
                    IList <IToken> tokens    = GetAllTokens(lexer);
                    stopwatch.Stop();
                    long lexerTimeSpanTicks = stopwatch.ElapsedTicks;

#if DEBUG
                    // Human-readable token dump for debugger inspection only.
                    var codeTokensStr = AntlrHelper.GetTokensString(tokens, Vocabulary, onlyDefaultChannel: false);
#endif

                    ClearLexerCacheIfRequired(lexer);

                    // Pull comment-channel tokens aside; exposed via result.Comments.
                    foreach (var token in tokens)
                    {
                        if (token.Channel == CommentsChannel)
                        {
                            commentTokens.Add(token);
                        }
                    }

                    // Time the parsing stage.
                    stopwatch.Restart();
                    var codeTokenSource          = new ListTokenSource(tokens);
                    var codeTokenStream          = new CommonTokenStream(codeTokenSource);
                    ParserRuleContext syntaxTree = ParseTokens(sourceCodeFile, errorListener, codeTokenStream);
                    stopwatch.Stop();
                    long parserTimeSpanTicks = stopwatch.ElapsedTicks;

                    IncrementProcessedFilesCount();

                    result = Create(syntaxTree);
                    result.LexerTimeSpan  = new TimeSpan(lexerTimeSpanTicks);
                    result.ParserTimeSpan = new TimeSpan(parserTimeSpanTicks);
                    result.Tokens         = tokens;
                    result.Comments       = commentTokens;
                }
                catch (Exception ex)
                {
                    // NOTE(review): this catch has no filter, so it also
                    // swallows ThreadAbortException — confirm that is intended.
                    Logger.LogError(new ParsingException(filePath, ex));

                    if (result == null)
                    {
                        result = Create(null);
                    }
                }
            }
            else
            {
                result = Create(null);
            }
            result.FileName = filePath;
            result.FileData = sourceCodeFile.Code;

            return(result);
        }
Exemple #11
0
        /// <summary>
        /// Demo driver: runs the two-stage C# parse on one file — evaluate
        /// preprocessor directives first to filter tokens, then parse the
        /// surviving code tokens — and prints the token dump and tree, or an
        /// error message.
        /// </summary>
        static void Try(string ffn)
        {
            var           sourceCode        = System.IO.File.ReadAllText(ffn);
            List <IToken> codeTokens        = new List <IToken>();
            List <IToken> commentTokens     = new List <IToken>();
            Lexer         preprocessorLexer = new CSharpLexer(new AntlrInputStream(sourceCode));

            // Collect all tokens with lexer (CSharpLexer.g4).
            var tokens = preprocessorLexer.GetAllTokens();

            var directiveTokens      = new List <IToken>();
            var directiveTokenSource = new ListTokenSource(directiveTokens);
            var directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
            CSharpPreprocessorParser preprocessorParser = new CSharpPreprocessorParser(directiveTokenStream);
            int  index           = 0;
            // True while the current token region is enabled by the directives.
            bool compiliedTokens = true;

            while (index < tokens.Count)
            {
                var token = tokens[index];
                if (token.Type == CSharpLexer.SHARP)
                {
                    directiveTokens.Clear();
                    int directiveTokenIndex = index + 1;
                    // Collect all preprocessor directive tokens.
                    while (directiveTokenIndex < tokens.Count &&
                           tokens[directiveTokenIndex].Type != CSharpLexer.Eof &&
                           tokens[directiveTokenIndex].Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                           tokens[directiveTokenIndex].Type != CSharpLexer.SHARP)
                    {
                        if (tokens[directiveTokenIndex].Channel == CSharpLexer.COMMENTS_CHANNEL)
                        {
                            commentTokens.Add(tokens[directiveTokenIndex]);
                        }
                        else if (tokens[directiveTokenIndex].Channel != Lexer.Hidden)
                        {
                            directiveTokens.Add(tokens[directiveTokenIndex]);
                        }
                        directiveTokenIndex++;
                    }

                    // Re-point the shared preprocessor parser at this directive.
                    directiveTokenSource           = new ListTokenSource(directiveTokens);
                    directiveTokenStream           = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
                    preprocessorParser.TokenStream = directiveTokenStream;
                    preprocessorParser.Reset();
                    // Parse condition in preprocessor directive (based on CSharpPreprocessorParser.g4 grammar).
                    CSharpPreprocessorParser.Preprocessor_directiveContext directive = preprocessorParser.preprocessor_directive();
                    // if true then the following code is valid and not ignored.
                    compiliedTokens = directive.value;
                    // These directives never disable code, so keep the region active.
                    var directiveStr = tokens[index + 1].Text.Trim();
                    if ("line".Equals(directiveStr) || "error".Equals(directiveStr) || "warning".Equals(directiveStr) || "define".Equals(directiveStr) || "endregion".Equals(directiveStr) || "endif".Equals(directiveStr) || "pragma".Equals(directiveStr))
                    {
                        compiliedTokens = true;
                    }
                    string conditionalSymbol = null;
                    if ("define".Equals(tokens[index + 1].Text))
                    {
                        // add to the conditional symbols
                        conditionalSymbol = tokens[index + 2].Text;
                        preprocessorParser.ConditionalSymbols.Add(conditionalSymbol);
                    }
                    if ("undef".Equals(tokens[index + 1].Text))
                    {
                        // remove from the conditional symbols
                        conditionalSymbol = tokens[index + 2].Text;
                        preprocessorParser.ConditionalSymbols.Remove(conditionalSymbol);
                    }
                    // Resume the outer scan just before the directive terminator.
                    index = directiveTokenIndex - 1;
                }
                else if (token.Channel == CSharpLexer.COMMENTS_CHANNEL)
                {
                    commentTokens.Add(token); // Collect comment tokens (if required).
                }
                else if (token.Channel != Lexer.Hidden && token.Type != CSharpLexer.DIRECTIVE_NEW_LINE && compiliedTokens)
                {
                    codeTokens.Add(token); // Collect code tokens.
                }
                index++;
            }

            // At second stage tokens parsed in usual way.
            var          codeTokenSource = new ListTokenSource(codeTokens);
            var          codeTokenStream = new CommonTokenStream(codeTokenSource);
            CSharpParser parser          = new CSharpParser(codeTokenStream);
            // Parse syntax tree (CSharpParser.g4)
            var listener = new ErrorListener <IToken>(parser, preprocessorLexer, codeTokenStream);

            parser.AddErrorListener(listener);
            var tree = parser.compilation_unit();

            if (listener.had_error)
            {
                System.Console.WriteLine("error in parse.");
            }
            else
            {
                System.Console.WriteLine("parse completed.");
            }
            System.Console.WriteLine(codeTokenStream.OutputTokens(preprocessorLexer));
            System.Console.WriteLine(tree.OutputTree(codeTokenStream, preprocessorLexer));
        }
Exemple #12
0
        /// <summary>
        /// Reads one grammar file, parses it with the ANTLRv4 meta-grammar,
        /// and fills _result with collected information: rules, superclasses,
        /// code insertions, and tool options recognized inside comment tokens.
        /// Files without the ANTLR extension are only registered in
        /// GrammarFilesData and skipped.
        /// </summary>
        private void ProcessGrammarFile(Grammar grammar, string grammarFileName,
                                        AntlrErrorListener antlrErrorListener, CancellationToken cancellationToken)
        {
            string code        = File.ReadAllText(Path.Combine(grammar.Directory, grammarFileName));
            var    inputStream = new AntlrInputStream(code);
            var    codeSource  = new CodeSource(grammarFileName, inputStream.ToString());

            _result.GrammarFilesData.Add(grammarFileName, codeSource);

            string extension = Path.GetExtension(grammarFileName);

            // Only ANTLR grammar files are parsed further.
            if (extension != Grammar.AntlrDotExt)
            {
                return;
            }

            antlrErrorListener.CodeSource = codeSource;
            var antlr4Lexer = new ANTLRv4Lexer(inputStream);

            antlr4Lexer.RemoveErrorListeners();
            antlr4Lexer.AddErrorListener(antlrErrorListener);
            var tokens          = antlr4Lexer.GetAllTokens();
            var codeTokenSource = new ListTokenSource(tokens);

            // Bail out if the caller cancelled between lexing and parsing.
            cancellationToken.ThrowIfCancellationRequested();

            var codeTokenStream = new CommonTokenStream(codeTokenSource);
            var antlr4Parser    = new ANTLRv4Parser(codeTokenStream);

            antlr4Parser.RemoveErrorListeners();
            antlr4Parser.AddErrorListener(antlrErrorListener);

            var tree = antlr4Parser.grammarSpec();

            // Walk the tree to gather rules, superclass, type, and insertions.
            var grammarInfoCollectorListener = new GrammarInfoCollectorListener();

            grammarInfoCollectorListener.CollectInfo(antlrErrorListener.CodeSource, tree);

            // NOTE(review): shortFileName is unused below — confirm whether it
            // can be removed.
            var shortFileName = Path.GetFileNameWithoutExtension(grammarFileName);

            _result.GrammarActionsTextSpan[grammarFileName] = grammarInfoCollectorListener.CodeInsertions;

            var grammarType = grammarInfoCollectorListener.GrammarType;

            // Route the superclass/rules by which side (lexer/parser) the
            // grammar defines.
            if (grammarType == GrammarType.Lexer || grammarType == GrammarType.Combined)
            {
                _result.LexerSuperClass = grammarInfoCollectorListener.SuperClass;
            }

            if (grammarType == GrammarType.Separated || grammarType == GrammarType.Combined)
            {
                _result.ParserSuperClass = grammarInfoCollectorListener.SuperClass;
                _result.Rules            = grammarInfoCollectorListener.Rules;
            }

            // Shared error sink: raise the event and record the error.
            void ErrorAction(ParsingError parsingError)
            {
                ErrorEvent?.Invoke(this, parsingError);
                _result.Errors.Add(parsingError);
            }

            // Option matchers scan comment tokens for tool directives.
            var caseInsensitiveTypeOptionMatcher = new CaseInsensitiveTypeOptionMatcher(codeSource, grammarType, ErrorAction);
            var runtimeOptionMatcher             = new RuntimeOptionMatcher(codeSource, grammarType, ErrorAction);
            var visitorOptionMatcher             = new VisitorOptionMatcher(codeSource, grammarType, ErrorAction);
            var listenerOptionMatcher            = new ListenerOptionMatcher(codeSource, grammarType, ErrorAction);
            var packageOptionMatcher             = new PackageOptionMatcher(codeSource, grammarType, ErrorAction);
            var rootOptionMatcher       = new RootOptionMatcher(codeSource, grammarType, ErrorAction, _result.Rules);
            var predictionOptionMatcher = new PredictionModeOptionMatcher(codeSource, grammarType, ErrorAction);

            // First matcher that recognizes a comment wins for that token.
            foreach (IToken token in tokens)
            {
                if (token.Type == ANTLRv4Lexer.LINE_COMMENT || token.Type == ANTLRv4Lexer.BLOCK_COMMENT)
                {
                    if (caseInsensitiveTypeOptionMatcher.Match(token, out var caseInsensitiveType))
                    {
                        _result.CaseInsensitiveType = caseInsensitiveType;
                        continue;
                    }

                    if (runtimeOptionMatcher.Match(token, out Runtime runtime))
                    {
                        _result.Runtime = runtime;
                        continue;
                    }

                    if (packageOptionMatcher.Match(token, out string package))
                    {
                        _result.Package = package;
                        continue;
                    }

                    if (visitorOptionMatcher.Match(token, out bool generateVisitor))
                    {
                        _result.Visitor = generateVisitor;
                        continue;
                    }

                    if (listenerOptionMatcher.Match(token, out bool generateListener))
                    {
                        _result.Listener = generateListener;
                        continue;
                    }

                    if (rootOptionMatcher.Match(token, out string root))
                    {
                        _result.Root = root;
                        continue;
                    }

                    if (predictionOptionMatcher.Match(token, out PredictionMode predictionMode))
                    {
                        _result.PredictionMode = predictionMode;
                        continue;
                    }
                }
            }
        }
Exemple #13
0
    public static void Main(string[] args)
    {
        try {
            string[] files = Directory.GetFiles(".", "*.cs");
            foreach (string file in files)
            {
                Console.WriteLine(file + "---------------------------------------");

                //Read the file
                string text = File.ReadAllText(file);

                //Create the lexer
                CSharpLexer lexer = new CSharpLexer(new AntlrInputStream(text));

                var           tokens        = lexer.GetAllTokens();
                List <IToken> codeTokens    = new List <IToken>();
                List <IToken> commentTokens = new List <IToken>();

                var directiveTokens      = new List <IToken>();
                var directiveTokenSource = new ListTokenSource(directiveTokens);
                var directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
                CSharpPreprocessorParser preprocessorParser = new CSharpPreprocessorParser(directiveTokenStream);

                int  index           = 0;
                bool compiliedTokens = true;
                while (index < tokens.Count)
                {
                    var token = tokens[index];
                    // First-stage token pass: handle a C# preprocessor directive ('#...') so that
                    // directive tokens and inactive code regions never reach the main parser.
                    if (token.Type == CSharpLexer.SHARP)
                    {
                        directiveTokens.Clear();
                        int directiveTokenIndex = index + 1;
                        // Collect all preprocessor directive tokens (up to end-of-line, EOF, or the next '#').
                        while (directiveTokenIndex < tokens.Count &&
                               tokens[directiveTokenIndex].Type != CSharpLexer.Eof &&
                               tokens[directiveTokenIndex].Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                               tokens[directiveTokenIndex].Type != CSharpLexer.SHARP)
                        {
                            if (tokens[directiveTokenIndex].Channel == CSharpLexer.COMMENTS_CHANNEL)
                            {
                                // Comments found inside the directive are kept separately.
                                commentTokens.Add(tokens[directiveTokenIndex]);
                            }
                            else if (tokens[directiveTokenIndex].Channel != Lexer.Hidden)
                            {
                                //Console.WriteLine(allTokens[directiveTokenIndex] + "  HOLA");
                                directiveTokens.Add(tokens[directiveTokenIndex]);
                            }
                            directiveTokenIndex++;
                        }

                        // Feed the collected directive tokens to the dedicated preprocessor parser.
                        directiveTokenSource           = new ListTokenSource(directiveTokens);
                        directiveTokenStream           = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
                        preprocessorParser.TokenStream = directiveTokenStream;
                        //preprocessorParser.SetInputStream(directiveTokenStream);
                        preprocessorParser.Reset();
                        // Parse condition in preprocessor directive (based on CSharpPreprocessorParser.g4 grammar).
                        CSharpPreprocessorParser.Preprocessor_directiveContext directive = preprocessorParser.preprocessor_directive();
                        // If true, the code that follows is active and will be kept; otherwise it is skipped.
                        compiliedTokens = directive.value;

                        String directiveStr = tokens[index + 1].Text.Trim();
                        // These directive kinds never disable the code that follows them,
                        // so force the "active" state regardless of the parsed condition value.
                        if ("line".Equals(directiveStr) || "error".Equals(directiveStr) || "warning".Equals(directiveStr) || "define".Equals(directiveStr) || "endregion".Equals(directiveStr) || "endif".Equals(directiveStr) || "pragma".Equals(directiveStr))
                        {
                            //Console.WriteLine(directiveStr);
                            compiliedTokens = true;
                        }
                        String conditionalSymbol = null;
                        if ("define".Equals(tokens[index + 1].Text))
                        {
                            // #define: add the symbol to the conditional symbols.
                            conditionalSymbol = tokens[index + 2].Text;
                            preprocessorParser.ConditionalSymbols.Add(conditionalSymbol);
                        }
                        if ("undef".Equals(tokens[index + 1].Text))
                        {
                            // #undef: remove the symbol from the conditional symbols.
                            conditionalSymbol = tokens[index + 2].Text;
                            preprocessorParser.ConditionalSymbols.Remove(conditionalSymbol);
                        }

                        // This code deletes the directive tokens from the input so that they don't interfere with the parsing process.
                        // In all of the cases, we have to remove at least two positions of the tokens array.
                        // NOTE(review): the removal counts below assume a fixed number of tokens per
                        // directive kind (e.g. '#', keyword, optional argument) — TODO confirm this
                        // matches the lexer's actual directive token layout for every directive.
                        tokens.RemoveAt(directiveTokenIndex - 1);
                        tokens.RemoveAt(directiveTokenIndex - 2);

                        if ("pragma".Equals(directiveStr) || "warning".Equals(directiveStr) || "region".Equals(directiveStr) || "error".Equals(directiveStr))
                        {
                            // Remove three positions before
                            tokens.RemoveAt(directiveTokenIndex - 3);
                            directiveTokenIndex--;
                        }
                        else if ("define".Equals(directiveStr) || "undef".Equals(directiveStr) || "if".Equals(directiveStr) || "elif".Equals(directiveStr) || "line".Equals(directiveStr))
                        {
                            // Remove four positions before
                            tokens.RemoveAt(directiveTokenIndex - 3);
                            tokens.RemoveAt(directiveTokenIndex - 4);
                            directiveTokenIndex -= 2;
                        }
                        // Resume scanning right after the (now shortened) directive region.
                        directiveTokenIndex -= 2;
                        index = directiveTokenIndex - 1;
                    }
                    else if (token.Channel == CSharpLexer.COMMENTS_CHANNEL)
                    {
                        commentTokens.Add(token); // Collect comment tokens (if required).
                    }
                    else if (token.Channel != Lexer.Hidden && token.Type != CSharpLexer.DIRECTIVE_NEW_LINE && compiliedTokens)
                    {
                        codeTokens.Add(token); // Collect code tokens.
                    }
                    index++;
                }

                // Second stage: parse the directive-stripped token list with the normal C# parser.
                var          codeTokenSource = new ListTokenSource(tokens);
                var          codeTokenStream = new CommonTokenStream(codeTokenSource);
                CSharpParser parser          = new CSharpParser(codeTokenStream);

                ////Create the token stream
                //CommonTokenStream tokens = new CommonTokenStream(lexer);
                //CSharpParser parser = new CSharpParser(tokens);
                IParseTree tree = parser.compilation_unit();

                ////Walk the tree
                ParseTreeWalker walker = new ParseTreeWalker();
                walker.Walk(new ProgressPrinter(), tree);
            }
        }
        catch (Exception e) {
            // Top-level catch: report any lexing/parsing failure and exit gracefully.
            Console.WriteLine("Error (Program.cs): " + e);
        }
    }
Exemple #14
0
        /// <summary>
        /// Reads every grammar file referenced by <paramref name="inputState"/>, parses the
        /// ANTLR (*.g4) ones, and gathers rules, superclasses, action text spans and any
        /// lexing/parsing errors into a <see cref="GrammarCheckedState"/>.
        /// </summary>
        /// <param name="inputState">Workflow state holding the grammar and its file list.</param>
        /// <param name="cancellationToken">Allows aborting the check between processing steps.</param>
        /// <returns>
        /// The populated result. On failure the result's Exception property is set instead of throwing.
        /// </returns>
        public GrammarCheckedState Check(InputState inputState, CancellationToken cancellationToken = default)
        {
            var grammar = inputState.Grammar;
            var result = new GrammarCheckedState(inputState);

            try
            {
                // Forward ANTLR errors to external subscribers and also accumulate
                // them (thread-safely) in the result object.
                var errorListener = new AntlrErrorListener();
                errorListener.ErrorEvent += ErrorEvent;
                errorListener.ErrorEvent += (sender, parsingError) =>
                {
                    lock (result.Errors)
                    {
                        result.Errors.Add(parsingError);
                    }
                };

                foreach (string fileName in grammar.Files)
                {
                    var content = File.ReadAllText(Path.Combine(grammar.Directory, fileName));
                    var charStream = new AntlrInputStream(content);
                    var source = new CodeSource(fileName, charStream.ToString());
                    result.GrammarFilesData.Add(fileName, source);

                    // Non-ANTLR files are recorded above but not parsed.
                    if (Path.GetExtension(fileName) != Grammar.AntlrDotExt)
                    {
                        continue;
                    }

                    errorListener.CodeSource = source;
                    var lexer = new ANTLRv4Lexer(charStream);
                    lexer.RemoveErrorListeners();
                    lexer.AddErrorListener(errorListener);
                    var tokenSource = new ListTokenSource(lexer.GetAllTokens());

                    cancellationToken.ThrowIfCancellationRequested();

                    var tokenStream = new CommonTokenStream(tokenSource);
                    var parser = new ANTLRv4Parser(tokenStream);

                    parser.RemoveErrorListeners();
                    parser.AddErrorListener(errorListener);

                    var tree = parser.grammarSpec();

                    // Walk the parse tree collecting rules, superclass names and code insertions.
                    var collector = new GrammarInfoCollectorListener();
                    collector.CollectInfo(errorListener.CodeSource, tree);

                    var shortName = Path.GetFileNameWithoutExtension(fileName);
                    result.GrammarActionsTextSpan[fileName] = collector.CodeInsertions;

                    if (fileName.Contains(Grammar.LexerPostfix))
                    {
                        result.LexerSuperClass = collector.SuperClass;
                    }

                    if (fileName.Contains(Grammar.ParserPostfix))
                    {
                        result.ParserSuperClass = collector.SuperClass;
                    }

                    // Lexer-only grammars contribute no parser rules.
                    if (!shortName.Contains(Grammar.LexerPostfix))
                    {
                        result.Rules = collector.Rules;
                        cancellationToken.ThrowIfCancellationRequested();
                    }
                }
            }
            catch (Exception ex)
            {
                // Record the failure on the result; cancellation is an expected
                // outcome and is not reported as a parsing error.
                result.Exception = ex;
                if (!(ex is OperationCanceledException))
                {
                    ErrorEvent?.Invoke(this, new ParsingError(ex, WorkflowStage.GrammarChecked));
                }
            }

            return result;
        }