Example #1
        public void TestEmptyString()
        {
            Lexer lexer = new Lexer("");
            List<Token> tokens = lexer.Tokenize();

            CheckTokens(tokens,
                TokenType.Eof);
        }
Example #2
        public void TestIdToken1()
        {
            Lexer lexer = new Lexer("abc");
            List<Token> tokens = lexer.Tokenize();

            CheckTokens(tokens,
                TokenType.Id,
                TokenType.Eof);

            Assert.AreEqual(tokens[0].Value, "abc");
        }
Example #3
        public void TestIdAndParentheses()
        {
            Lexer lexer = new Lexer("print()");
            List<Token> tokens = lexer.Tokenize();

            CheckTokens(tokens,
                TokenType.Id,
                TokenType.LParen,
                TokenType.RParen,
                TokenType.Eof);

            Assert.AreEqual(tokens[0].Value, "print");
        }
Example #4
        public void TestAllTokens()
        {
            Lexer lexer = new Lexer("abc 123 { } ( ) , ; if else");
            List<Token> tokens = lexer.Tokenize();

            CheckTokens(tokens,
                TokenType.Id,
                TokenType.Num,
                TokenType.LBrace,
                TokenType.RBrace,
                TokenType.LParen,
                TokenType.RParen,
                TokenType.Comma,
                TokenType.Semicolon,
                TokenType.If,
                TokenType.Else,
                TokenType.Eof);
        }
Example #5
        public void TestNumToken2()
        {
            Lexer lexer = new Lexer("123abc");
            List<Token> tokens = lexer.Tokenize();

            CheckTokens(tokens,
                TokenType.Num,
                TokenType.Id,
                TokenType.Eof);

            Assert.AreEqual(tokens[0].Value, "123");
            Assert.AreEqual(tokens[1].Value, "abc");
        }
Example #6
        public void TestIfAndParentheses()
        {
            Lexer lexer = new Lexer("\n    \nif  \n\n(\n \n)  \n\n{    \n }");
            List<Token> tokens = lexer.Tokenize();

            CheckTokens(tokens,
                TokenType.If,
                TokenType.LParen,
                TokenType.RParen,
                TokenType.LBrace,
                TokenType.RBrace,
                TokenType.Eof);
        }
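
Every example above relies on a CheckTokens helper and on Token/TokenType types that are not shown in the snippets. For reference, here is a minimal sketch of those pieces, reconstructed only from how the tests use them: the params TokenType[] signature of CheckTokens, the string Value property read via tokens[i].Value, and the enum members that appear in the examples. The names match the tests, but the bodies are assumptions rather than the project's actual implementation; the assert calls follow the NUnit-style Assert.AreEqual used in the examples.

        using System.Collections.Generic;
        using NUnit.Framework; // assumption: an MSTest Assert would work the same way

        public enum TokenType
        {
            // Members inferred from the examples above.
            Id, Num, LBrace, RBrace, LParen, RParen,
            Comma, Semicolon, If, Else, Eof
        }

        public class Token
        {
            // The tests read token.Value as a string (e.g. "abc", "123", "print").
            public TokenType Type { get; }
            public string Value { get; }

            public Token(TokenType type, string value)
            {
                Type = type;
                Value = value;
            }
        }

        public static class TestHelpers
        {
            // Sketch of the CheckTokens helper used in every example:
            // it checks the token count and the type of each token in order.
            public static void CheckTokens(List<Token> tokens, params TokenType[] expected)
            {
                Assert.AreEqual(expected.Length, tokens.Count);
                for (int i = 0; i < expected.Length; i++)
                {
                    Assert.AreEqual(expected[i], tokens[i].Type);
                }
            }
        }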