ReadToken() public method

public ReadToken ( ) : Token
Returns: Token
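
In the examples below, ReadToken() reads the next Token from the underlying input and advances the tokenizer. How the end of input is reported varies by implementation: the VARP Tokenizer returns null, while the ITokenizer and Python Tokenizer variants return a dedicated end-of-stream token. A minimal sketch of the null-terminated style, assuming the Tokenizer(TextReader, string) constructor used in the VARP examples (the CountTokens helper itself is hypothetical, not part of any example):

        // Minimal sketch (hypothetical helper): drain a Tokenizer by calling
        // ReadToken() until it returns null, which is how the VARP Tokenizer
        // signals end of input.
        private static int CountTokens(string source)
        {
            var lexer = new Tokenizer(new System.IO.StringReader(source), "ReadTokenSketch");
            var count = 0;
            for (var token = lexer.ReadToken(); token != null; token = lexer.ReadToken())
            {
                count++;
            }
            return count;
        }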
Example #1
File: TokenizerTest.cs Project: hww/VARP
    private void OnValidate()
    {
        System.Text.StringBuilder sb;
        // ------------------------------------------------------------------
        // Just tokenized it
        // ------------------------------------------------------------------
        sb    = new System.Text.StringBuilder();
        lexer = new Tokenizer(new StringReader(testString), "TokenizerTest");
        var token = lexer.ReadToken();

        while (token != null)
        {
            sb.Append(Inspector.Inspect(token) + " ");
            token = lexer.ReadToken();
        }
        tokensString = sb.ToString();

        // ------------------------------------------------------------------
        // Parse scheme
        // ------------------------------------------------------------------
        lexer = new Tokenizer(new StringReader(testString), "TokenizerTest");
        sb    = new System.Text.StringBuilder();
        do
        {
            SObject result = Parser.Parse(lexer);
            if (result == null)
            {
                break;
            }
            sb.AppendLine(result.Inspect());
        } while (lexer.LastToken != null);
        resultString = sb.ToString();
    }
Example #2
        private static Syntax ParseVector(Token thisToken, Tokenizer moreTokens)
        {
            var   listContents = new ListSyntax( );
            Token dotToken     = null;

            var nextToken = moreTokens.ReadToken();

            while (nextToken != null && nextToken.type != TokenType.CloseBracket)
            {
                // Parse this token
                listContents.Add(ParseToken(nextToken, moreTokens));

                // Fetch the next token
                nextToken = moreTokens.ReadToken();
                if (nextToken == null)
                {
                    throw ParserError.SyntaxError("parser", "Improperly formed list.", dotToken);
                }

                //if (!improper && nextToken.Type == TokenType.Symbol && dotSymbol.Equals(nextToken.Value) && thisToken.Type == TokenType.OpenBracket)
                if (nextToken.type == TokenType.Dot)
                {
                    throw ParserError.SyntaxError("parser", "Improperly formed dotted list", nextToken);
                }
            }

            if (nextToken == null) // Missing ')'
            {
                throw ParserError.SyntaxError("parser", "Missing close parenthesis", thisToken);
            }

            return(Syntax.Create(listContents, thisToken.location));
        }
Example #3
        void Test(string source, string expectedResult)
        {
            // Just tokenized it
            System.Text.StringBuilder sb = new System.Text.StringBuilder();
            Tokenizer lexer = new Tokenizer(new StringReader(source), "TokenizerTest");
            Token     token = lexer.ReadToken();

            bool addSpace = false;

            while (token != null)
            {
                if (addSpace)
                {
                    sb.Append(" ");
                }
                else
                {
                    addSpace = true;
                }
                sb.Append(token.value + ":" + token.type.ToString());
                token = lexer.ReadToken();
            }

            string result = sb.ToString();

            Assert.AreEqual(expectedResult, result);
        }
Example #4
    private void OnValidate()
    {
        System.Text.StringBuilder sb;
        try
        {
            // ------------------------------------------------------------------
            // Just tokenized it
            // ------------------------------------------------------------------
            sb    = new System.Text.StringBuilder();
            lexer = new Tokenizer(new StringReader(testString), "TokenizerTest");
            var token = lexer.ReadToken();
            while (token != null)
            {
                sb.Append(Inspector.Inspect(token) + " ");
                token = lexer.ReadToken();
            }
            tokensString = sb.ToString();

            // ------------------------------------------------------------------
            // Parse scheme
            // ------------------------------------------------------------------
            lexer = new Tokenizer(new StringReader(testString), "TokenizerTest");
            sb    = new System.Text.StringBuilder();
            do
            {
                SObject result = Parser.Parse(lexer);
                if (result == null)
                {
                    break;
                }
                sb.AppendLine(result.Inspect());
            } while (lexer.LastToken != null);
            syntaxString = sb.ToString();

            // ------------------------------------------------------------------
            // Expand parsed expressions to AST
            // ------------------------------------------------------------------

            lexer = new Tokenizer(new StringReader(testString), "TokenizerTest");

            sb = new System.Text.StringBuilder();
            do
            {
                var result = Parser.Parse(lexer);
                if (result == null)
                {
                    break;
                }
                var ast = AstBuilder.Expand(result, SystemEnvironment.Top);
                sb.AppendLine(ast.Inspect());
            } while (lexer.LastToken != null);
            astString = sb.ToString();
            envString = SystemEnvironment.Top.Inspect();
        }
        catch (SchemeError ex)
        {
            astString = ex.Message;
            throw; // rethrow, preserving the original stack trace
        }
    }
Example #5
        public void ReadToken_IgnoresWhitespace(string input)
        {
            // Arrange
            ITokenizer tokenizer = new Tokenizer(input);

            // Act
            Token token1 = tokenizer.ReadToken();
            Token token2 = tokenizer.ReadToken();
            Token token3 = tokenizer.ReadToken();

            // Assert
            Assert.AreEqual(TokenType.Number, token1.Type);
            Assert.AreEqual(TokenType.Number, token2.Type);
            Assert.AreEqual(TokenType.EndOfCode, token3.Type);
        }
Example #6
        private static Token RunTokenizer(string source)
        {
            var tokenizer = new Tokenizer(new StringReader(source));

            tokenizer.ReadToken();
            return(tokenizer.Lookahead);
        }
Example #7
        /// <summary>
        /// Works out how many brackets are missing for the expression given by the Tokenizer
        /// </summary>
        /// <param name="reader">The reader to read expressions from</param>
        /// <returns>The number of closing parentheses required to complete the expression (-1 if there are too many)</returns>
        public static int RemainingBrackets(Tokenizer reader)
        {
            var   bracketCount = 0;
            Token thisToken;

            try
            {
                thisToken = reader.ReadToken();
            }
            catch (ParserError)
            {
                thisToken = new Token(TokenType.BadSyntax, "", null);
            }
            catch (ArithmeticException)
            {
                thisToken = new Token(TokenType.BadNumber, "", null);
            }

            while (thisToken != null)
            {
                switch (thisToken.type)
                {
                case TokenType.OpenBracket:
                case TokenType.OpenVector:
                    // If this begins a list or a vector, increase the bracket count
                    bracketCount++;
                    break;

                case TokenType.CloseBracket:
                    // Close brackets indicate the end of a list or vector
                    bracketCount--;
                    break;
                }

                // Get the next token
                thisToken = reader.ReadToken();
            }

            // Set the count to -1 if there were too many brackets
            if (bracketCount < 0)
            {
                bracketCount = -1;
            }

            return(bracketCount);
        }
Example #8
        public void ReadIdentifier_Exception(string input)
        {
            // Arrange
            ITokenizer tokenizer = new Tokenizer(input);

            // Act & Assert
            LangException exception = Assert.Throws <LangException>(() => tokenizer.ReadToken());

            Assert.AreEqual(LangException.ErrorCode.UNEXPECTED_TOKEN, exception.ErrorType);
        }
Example #9
        /// <summary>
        /// Turns the contents of a Tokenizer into an object
        /// </summary>
        /// <remarks>
        /// Recursive-descent via ParseToken. There may be tokens left in the reader.
        /// </remarks>
        public static Syntax Parse(Tokenizer reader)
        {
            var firstToken = reader.ReadToken();

            if (firstToken == null)
            {
                return(null);
            }
            return(ParseToken(firstToken, reader));
        }
Example #10
    private void OnValidate( )
    {
        Name.Init( );
        System.Text.StringBuilder sb;

        // ------------------------------------------------------------------
        // Just tokenized it
        // ------------------------------------------------------------------
        sb    = new System.Text.StringBuilder( );
        lexer = new Tokenizer(new StringReader(testString), "TokenizerTest");
        var token = lexer.ReadToken( );

        while (token != null)
        {
            sb.Append(Inspector.InspectObject(token) + " ");
            token = lexer.ReadToken( );
        }
        tokensString = sb.ToString( );
    }
Example #11
        private static IEnumerable <TokenInfo> GetLineTokens(ITextSnapshotLine line, PythonLanguageVersion version)
        {
            var sourceSpan = new SnapshotSpanSourceCodeReader(line.Extent);
            var tokenizer  = new Tokenizer(version, options: TokenizerOptions.VerbatimCommentsAndLineJoins);

            tokenizer.Initialize(sourceSpan);
            for (var t = tokenizer.ReadToken(); t.Category != TokenCategory.EndOfStream; t = tokenizer.ReadToken())
            {
                yield return(t);
            }
        }
Example #12
        public void ReadOperator(string input)
        {
            // Arrange
            ITokenizer tokenizer = new Tokenizer(input);

            // Act
            Token token = tokenizer.ReadToken();

            // Assert
            Assert.AreEqual(TokenType.Operator, token.Type);
            Assert.AreEqual(input, token.Text);
        }
Example #13
        /// <summary>
        /// Parses a scheme expression in the default manner
        /// </summary>
        /// <returns>A scheme object</returns>
        /// <remarks>It is an error to pass scheme to this method with 'extraneous' tokens, such as trailing closing brackets</remarks>
        public static Syntax Parse(string scheme, string filepath)
        {
            var reader = new Tokenizer(new System.IO.StringReader(scheme), filepath);

            var res   = Parse(reader);
            var token = reader.ReadToken();

            if (token != null)
            {
                throw ParserError.SyntaxError("parser", "found extra tokens after the end of a scheme expression", token);
            }

            return(res);
        }
Example #14
        public static List <Token> ReadAllTokens(this string input)
        {
            ITokenizer   tokenizer = new Tokenizer(input);
            List <Token> tokens    = new List <Token>();

            Token t;

            do
            {
                t = tokenizer.ReadToken();
                tokens.Add(t);
            } while (t.Type != TokenType.EndOfCode);

            return(tokens);
        }
Example #15
        /// <summary>
        /// Read the collection name and, in the case of system collections, an optional parameter
        /// </summary>
        public static string ParseCollection(Tokenizer tokenizer, out string name, out BsonValue options)
        {
            name = tokenizer.ReadToken().Expect(TokenType.Word).Value;

            // if collection starts with $, check if exist any parameter
            if (name.StartsWith("$"))
            {
                var next = tokenizer.LookAhead();

                if (next.Type == TokenType.OpenParenthesis)
                {
                    tokenizer.ReadToken(); // read (

                    if (tokenizer.LookAhead().Type == TokenType.CloseParenthesis)
                    {
                        options = null;
                    }
                    else
                    {
                        options = new JsonReader(tokenizer).Deserialize();
                    }

                    tokenizer.ReadToken().Expect(TokenType.CloseParenthesis); // read )
                }
                else
                {
                    options = null;
                }
            }
            else
            {
                options = null;
            }

            return(name + (options == null ? "" : "(" + JsonSerializer.Serialize(options) + ")"));
        }
Example #16
File: Parser.cs Project: hww/VARP
        public static Syntax ParseQuoted(Token thisToken, Tokenizer moreTokens)
        {
            Symbol quote = null;

            // First symbol is quote, unquote, quasiquote depending on what the token was
            switch (thisToken.Type)
            {
            case TokenType.Quote: quote = Symbol.QUOTE; break;

            case TokenType.Unquote: quote = Symbol.UNQUOTE; break;

            case TokenType.QuasiQuote: quote = Symbol.QUASIQUOTE; break;

            case TokenType.UnquoteSplicing: quote = Symbol.UNQUOTESPLICE; break;
            }
            var quote_stx = new Syntax(quote, thisToken);
            var nextToken = moreTokens.ReadToken();
            var quoted    = ParseToken(nextToken, moreTokens);
            var list      = new LinkedList <Syntax>();

            list.AddLast(quote_stx);
            list.AddLast(quoted);
            return(new Syntax(list, thisToken));
        }
Example #17
        public static Syntax ParseQuoted(Token thisToken, Tokenizer moreTokens)
        {
            EName quote = EName.None;

            // First symbol is quote, unquote, quasiquote depending on what the token was
            switch (thisToken.type)
            {
            case TokenType.Quote: quote = EName.Quote; break;

            case TokenType.Unquote: quote = EName.Unquote; break;

            case TokenType.QuasiQuote: quote = EName.Quasiquote; break;

            case TokenType.UnquoteSplicing: quote = EName.UnquoteSplicing; break;
            }
            var quote_stx = Syntax.Create((Name)quote, thisToken.location);
            var nextToken = moreTokens.ReadToken();
            var quoted    = ParseToken(nextToken, moreTokens);
            var list      = new LinkedList <Syntax>();

            list.AddLast(quote_stx);
            list.AddLast(quoted);
            return(Syntax.Create(list, thisToken.location));
        }
Example #18
        private static Syntax ParseList(Token thisToken, Tokenizer moreTokens)
        {
            // Is a list/vector
            var   listContents = new LinkedList <Syntax> ( );
            Token dotToken     = null;

            var nextToken = moreTokens.ReadToken();

            while (nextToken != null && nextToken.type != TokenType.CloseBracket)
            {
                // Parse this token
                listContents.AddLast(ParseToken(nextToken, moreTokens));

                // Fetch the next token
                nextToken = moreTokens.ReadToken();
                if (nextToken == null)
                {
                    throw ParserError.SyntaxError("parser", "Improperly formed list.", dotToken);
                }

                if (nextToken.type == TokenType.Dot)
                {
                    if (dotToken != null || thisToken.type != TokenType.OpenBracket)
                    {
                        throw ParserError.SyntaxError("parser", "Improperly formed dotted list", nextToken);
                    }
                    dotToken  = nextToken;
                    nextToken = moreTokens.ReadToken();
                    if (nextToken == null)
                    {
                        throw ParserError.SyntaxError("parser", "Improperly formed dotted list", dotToken);
                    }
                    if (nextToken.type == TokenType.CloseBracket)
                    {
                        throw ParserError.SyntaxError("parser", "Improperly formed dotted list", dotToken);
                    }
                    listContents.AddLast(ParseToken(nextToken, moreTokens));
                    nextToken = moreTokens.ReadToken();
                    if (nextToken.type != TokenType.CloseBracket)
                    {
                        throw ParserError.SyntaxError("parser", "Improperly formed dotted list", dotToken);
                    }
                    break;
                }
            }

            if (nextToken == null)
            {
                // Missing ')'
                throw ParserError.SyntaxError("parser", "missing close parenthesis", thisToken);
            }

            if (dotToken != null)
            {
                if (listContents.Count == 2)
                {
                    return(Syntax.Create(new Pair(listContents[0], listContents[1]), thisToken.location));
                }
                else
                {
                    throw ParserError.SyntaxError("parser", "improper dot syntax", thisToken);
                }
            }
            else
            {
                return(Syntax.Create(listContents, thisToken.location));
            }
        }
Example #19
 private void ReadToken() 
 {
   _tokenizer.ReadToken();
 }