// Exemplo n.º 1 (Example no. 1)
    /// <summary>
    /// Entry point: lexes TinyC source from the file named in args[0] (or from
    /// standard input when no argument is given), discards whitespace tokens,
    /// parses the program, and writes the rewritten token stream to stdout.
    /// Any failure is reported to stderr rather than crashing the process.
    /// </summary>
    public static void Main(string[] args)
    {
        try {
            // Track the file reader so it is always disposed; the original
            // code leaked the FileStream/StreamReader on every run.
            StreamReader fileReader = null;
            try
            {
                TinyCLexer lexer;
                if (args.Length > 0)
                {
                    fileReader = new StreamReader(new FileStream(args[0], FileMode.Open, FileAccess.Read));
                    lexer = new TinyCLexer(fileReader);
                }
                else
                {
                    // Stdin is owned by the process; no disposal needed here.
                    lexer = new TinyCLexer(new ByteBuffer(Console.OpenStandardInput()));
                }
                lexer.setTokenObjectClass(typeof(TokenWithIndex).FullName);
                TokenStreamRewriteEngine rewriteEngine = new TokenStreamRewriteEngine(lexer);
                rewriteEngine.discard(TinyCLexer.WS);
                TinyCParser parser = new TinyCParser(rewriteEngine);
                parser.program();
                Console.Out.Write(rewriteEngine.ToString());
            }
            finally
            {
                if (fileReader != null)
                {
                    fileReader.Dispose();
                }
            }
        }
        catch (Exception e)
        {
            Console.Error.WriteLine("exception: " + e);
        }
    }
// Exemplo n.º 2 (Example no. 2)
 /// <summary>
 /// Parses a synthesized rule definition given as plain text and returns its
 /// AST, or null if the text could not be parsed (the failure is reported
 /// through <see cref="ErrorManager"/>).
 /// </summary>
 public GrammarAST ParseArtificialRule(string ruleText)
 {
     // Lex the rule text, dropping whitespace and every comment flavor so the
     // parser sees only meaningful tokens.
     ANTLRLexer ruleLexer = new ANTLRLexer(new Antlr.Runtime.ANTLRStringStream(ruleText));
     TokenStreamRewriteEngine tokenBuffer = new TokenStreamRewriteEngine(ruleLexer);
     tokenBuffer.Discard(ANTLRParser.WS, ANTLRParser.ML_COMMENT, ANTLRParser.COMMENT, ANTLRParser.SL_COMMENT);

     // Wire the parser to this grammar so the rule is built in context.
     ANTLRParser ruleParser = new ANTLRParser(new Antlr.Runtime.CommonTokenStream(tokenBuffer));
     ruleParser.Grammar = this;
     ruleParser.GrammarType = this.type;
     try
     {
         Antlr.Runtime.IAstRuleReturnScope<GrammarAST> parsed = ruleParser.rule();
         return parsed.Tree;
     }
     catch (Exception e)
     {
         // Best-effort: report and signal failure with null rather than throw.
         ErrorManager.Error(ErrorManager.MSG_ERROR_CREATING_ARTIFICIAL_RULE, e);
         return null;
     }
 }
// Exemplo n.º 3 (Example no. 3)
        /** Parse a rule we add artificially that is a list of the other lexer
         *  rules like this: "Tokens : ID | INT | SEMI ;"  nextToken() will invoke
         *  this to set the current token.  Add char literals before
         *  the rule references.
         *
         *  If in filter mode, we want every alt to backtrack and we need to
         *  do k=1 to force the "first token def wins" rule.  Otherwise, the
         *  longest-match rule comes into play with LL(*).
         *
         *  The ANTLRParser antlr.g file now invokes this when parsing a lexer
         *  grammar, which I think is proper even though it peeks at the info
         *  that later phases will (re)compute.  It gets a list of lexer rules
         *  and builds a string representing the rule; then it creates a parser
         *  and adds the resulting tree to the grammar's tree.
         *
         *  Returns the artificial rule's AST, or null if parsing it failed
         *  (the failure is reported through ErrorManager).
         */
        public GrammarAST AddArtificialMatchTokensRule( GrammarAST grammarAST,
                                                       IList<string> ruleNames,
                                                       IList<string> delegateNames,
                                                       bool filterMode )
        {
            // Build the rule text from a template; filter mode adds k=1 and
            // backtracking so the first matching token definition wins.
            StringTemplate matchTokenRuleST;
            if ( filterMode )
            {
                matchTokenRuleST = new StringTemplate(
                        ArtificialTokensRuleName +
                        " options {k=1; backtrack=true;} : <rules; separator=\"|\">;",
                        typeof( AngleBracketTemplateLexer ) );
            }
            else
            {
                matchTokenRuleST = new StringTemplate(
                        ArtificialTokensRuleName + " : <rules; separator=\"|\">;",
                        typeof( AngleBracketTemplateLexer ) );
            }

            // One alternative per lexer rule, then one per delegate grammar's
            // artificial Tokens rule.
            foreach ( string rname in ruleNames )
            {
                matchTokenRuleST.SetAttribute( "rules", rname );
            }
            foreach ( string dname in delegateNames )
            {
                matchTokenRuleST.SetAttribute( "rules", dname + ".Tokens" );
            }

            // Lex and parse the generated rule text, discarding whitespace and
            // all comment flavors, in the context of this (lexer) grammar.
            ANTLRLexer lexer = new ANTLRLexer( new Antlr.Runtime.ANTLRStringStream( matchTokenRuleST.ToString() ) );
            TokenStreamRewriteEngine tokbuf = new TokenStreamRewriteEngine( lexer );
            tokbuf.Discard( ANTLRParser.WS, ANTLRParser.ML_COMMENT, ANTLRParser.COMMENT, ANTLRParser.SL_COMMENT );
            ANTLRParser parser = new ANTLRParser( new Antlr.Runtime.CommonTokenStream( tokbuf ) );

            parser.Grammar = this;
            parser.GrammarType = GrammarType.Lexer;
            try
            {
                ANTLRParser.rule_return result = parser.rule();
                if ( Tool.internalOption_PrintGrammarTree )
                {
                    Console.Out.WriteLine( "Tokens rule: " + ( (ITree)result.Tree ).ToStringTree() );
                }

                // Attach the artificial rule under the LEXER_GRAMMAR subtree.
                GrammarAST p = grammarAST;
                while ( p.Type != ANTLRParser.LEXER_GRAMMAR )
                {
                    p = (GrammarAST)p.getNextSibling();
                }
                p.AddChild( (Antlr.Runtime.Tree.ITree)result.Tree );
                return (GrammarAST)result.Tree;
            }
            catch ( Exception e )
            {
                ErrorManager.Error( ErrorManager.MSG_ERROR_CREATING_ARTIFICIAL_RULE,
                                   e );
                // BUG FIX: the original fell through to "result.Tree" with a
                // null result here, throwing NullReferenceException and masking
                // the error just reported. Signal failure with null instead.
                return null;
            }
        }