GetTokenType() public method

public GetTokenType ( string tokenName ) : int
tokenName string
Return int
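
A quick way to see what the method reports is to look up one name the grammar defines and one it does not. The sketch below is illustrative only: it assumes the Grammar(string grammarText) constructor used by the ANTLR test code, and the grammar text and variable names are made up for the example.

 // Sketch: token type lookup on a freshly built grammar (string ctor assumed).
 Grammar g = new Grammar(
     "grammar T;\n" +
     "a : ID ;\n" +
     "ID : 'a'..'z'+ ;" );

 int idType = g.GetTokenType( "ID" );           // defined token: >= Label.MIN_TOKEN_TYPE
 int missingType = g.GetTokenType( "UNKNOWN" ); // undefined name: expected to be Label.INVALID

 if ( idType >= Label.MIN_TOKEN_TYPE )
 {
     // "ID" is a real token type, usable wherever the tool expects one
 }
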
Example #1
 /** Pull your token definitions from an existing grammar in memory.
  *  You must use Grammar() ctor then this method then setGrammarContent()
  *  to make this work.  This was useful primarily for testing and
  *  interpreting grammars until I added import grammar functionality.
  *  When you import a grammar you implicitly import its vocabulary as well
  *  and keep the same token type values.
  *
  *  Returns the max token type found.
  */
 public virtual int ImportTokenVocabulary( Grammar importFromGr )
 {
     var importedTokenIDs = importFromGr.TokenIDs;
     foreach ( string tokenID in importedTokenIDs )
     {
         int tokenType = importFromGr.GetTokenType( tokenID );
         composite.MaxTokenType = Math.Max( composite.MaxTokenType, tokenType );
         if ( tokenType >= Label.MIN_TOKEN_TYPE )
         {
              //System.out.println("import token from grammar "+tokenID+"="+tokenType);
             DefineToken( tokenID, tokenType );
         }
     }
     return composite.MaxTokenType; // return max found
 }
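
As the header comment notes, ImportTokenVocabulary is intended to be called after the parameterless Grammar() constructor and before the grammar content is set, so that the importing grammar reuses the token type values of the source grammar. The sketch below only illustrates that ordering; the SetGrammarContent name and its string parameter are assumptions taken from the comment above, the Grammar(string) constructor is assumed as in the first sketch, and the grammar strings are made up.

 // Sketch: ctor -> ImportTokenVocabulary -> SetGrammarContent (ordering per the comment above).
 Grammar lexerGrammar = new Grammar( "lexer grammar L;\nID : 'a'..'z'+ ;" );
 Grammar parserGrammar = new Grammar();
 int maxType = parserGrammar.ImportTokenVocabulary( lexerGrammar ); // highest imported token type
 parserGrammar.SetGrammarContent( "parser grammar P;\na : ID ;" );

 // The imported name keeps the same token type value in both grammars.
 int typeInLexer = lexerGrammar.GetTokenType( "ID" );
 int typeInParser = parserGrammar.GetTokenType( "ID" );   // expected to equal typeInLexer
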
Example #2
        //throws Exception
        protected void checkSymbols( Grammar g,
                                    string rulesStr,
                                    string tokensStr )
        {
            var tokens = g.GetTokenDisplayNames();

            // make sure expected tokens are there
            //StringTokenizer st = new StringTokenizer( tokensStr, ", " );
            //while ( st.hasMoreTokens() )
            foreach ( string tokenName in tokensStr.Split( new string[] { ", " }, StringSplitOptions.RemoveEmptyEntries ) )
            {
                //String tokenName = st.nextToken();
                Assert.IsTrue(g.GetTokenType(tokenName) != Label.INVALID, "token " + tokenName + " expected");
                tokens.Remove( tokenName );
            }
            // make sure there are not any others (other than <EOF> etc...)
            foreach ( string tokenName in tokens )
            {
                Assert.IsTrue( g.GetTokenType( tokenName ) < Label.MIN_TOKEN_TYPE, "unexpected token name " + tokenName );
            }

            // make sure all expected rules are there
            //st = new StringTokenizer( rulesStr, ", " );
            int n = 0;
            //while ( st.hasMoreTokens() )
            foreach ( string ruleName in rulesStr.Split( new string[] { ", " }, StringSplitOptions.RemoveEmptyEntries ) )
            {
                //String ruleName = st.nextToken();
                Assert.IsNotNull(g.GetRule(ruleName), "rule " + ruleName + " expected");
                n++;
            }
            var rules = g.Rules;
            //System.out.println("rules="+rules);
            // make sure there are no extra rules
            Assert.AreEqual(n, rules.Count, "number of rules mismatch; expecting " + n + "; found " + rules.Count);
        }
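
For context, a caller of this helper would typically build a Grammar from a grammar string and then pass the expected rule and token lists in the same comma-separated form the helper splits on. The invocation below is hypothetical (not taken from the actual test suite) and again assumes the Grammar(string) constructor:

            // Hypothetical caller: build a grammar, then verify its symbol table.
            Grammar g = new Grammar(
                "parser grammar t;\n" +
                "a : A | B ;\n" +
                "b : C ;" );
            checkSymbols( g, "a, b", "A, B, C" );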