Example #1
        /// <summary>
        /// Consumes the next token; throws a syntax error if it is not one of the expected token types
        /// </summary>
        /// <param name="expectedTokens">The expected token types</param>
        /// <returns>The consumed token's data</returns>
        private KuickTokenizer.TokenData _consume(params KuickTokenizer.Token[] expectedTokens)
        {
            _voidAnyWhitespaceTokens();

            // if there is no next token
            if (_nextToken == KuickTokenizer.Token.NO_TOKEN)
            {
                // Throw syntax error
                throw new ParserSyntaxException("TokenConsumer: Unexpected end of input, expected `" + getTokenStrings(expectedTokens) + "`");
            }

            // If the token is not what we expected to come next
            if (expectedTokens.Contains(_nextToken.token) == false)
            {
                // Throw syntax error
                throw new ParserSyntaxException("TokenConsumer: Unexpected token `" + getTokenStrings(_nextToken.token) + "`, expected `" + getTokenStrings(expectedTokens) + "`");
            }


            // Cache next token
            KuickTokenizer.TokenData token = _nextToken;

            // Advance token look-ahead
            _nextToken = _tokenizer.readToken();

            // Return our cached token to caller
            return token;
        }
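
A minimal usage sketch (not part of the original source): a hypothetical production method that uses _consume to require one of several token types. The token names NUMBER and STRING are assumptions about KuickTokenizer's token set.

        // Hypothetical production method inside the same parser class (sketch only).
        private KuickTokenizer.TokenData _literal()
        {
            // _consume throws a ParserSyntaxException unless the current look-ahead
            // token matches one of the listed types; otherwise it returns the matched
            // token's data and advances the look-ahead.
            return _consume(KuickTokenizer.Token.NUMBER, KuickTokenizer.Token.STRING);
        }
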
Example #2
 private void _voidAnyWhitespaceTokens()
 {
     // Skip ignorable tokens (null, whitespace, or comment tokens) by advancing the look-ahead until a meaningful token is found
     while (_nextToken == KuickTokenizer.Token.NULL || _nextToken == KuickTokenizer.Token.WHITESPACE || _nextToken == KuickTokenizer.Token.COMMENT)
     {
         // Advance token look-ahead
         _nextToken = _tokenizer.readToken();
     }
 }
Example #3
        /// <summary>
        /// Parse a string into the internal format
        /// </summary>
        /// <param name="str">String to be parsed</param>
        /// <returns>The parsed result</returns>
        public ParseData parse(string str)
        {
            _string = str;
            _tokenizer.load(str);

            // prime the look-ahead
            _nextToken = _tokenizer.readToken();
            _voidAnyWhitespaceTokens();

            // Initiate recursive parse and return the result
            return Page();
        }
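
A minimal caller sketch (assumptions: the surrounding class is named Parser and ParseData is its result type, as in the example above; the input string is a placeholder, since the grammar accepted by Page() is not shown here).

        // Hypothetical usage of the parser's public entry point (sketch only).
        Parser parser = new Parser();
        ParseData result = parser.parse("...");   // replace "..." with source text in the parsed language
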