示例#1
0
        // Verifies that UnknownTokenException carries the correct 1-based
        // line/column position and the offending token text.
        public void BadTokenPosition()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            // Single-line input: "A" is unmatched at column 3 of line 1.
            try
            {
                tokenizer.Tokenize("5 A");

                Assert.Fail(); // fix: without this the test silently passes when no exception is thrown
            }
            catch (UnknownTokenException ex)
            {
                Assert.AreEqual(3, ex.Position.Column);
                Assert.AreEqual(1, ex.Position.Line);
                Assert.AreEqual("A", ex.Token);
            }

            // Multi-line (CRLF) input: "X" is unmatched at column 4 of line 3.
            try
            {
                tokenizer.Tokenize("5 4\r\n2\r\n   X\r\n5");

                Assert.Fail(); // fix: guard against the no-throw case here as well
            }
            catch (UnknownTokenException ex)
            {
                Assert.AreEqual(4, ex.Position.Column);
                Assert.AreEqual(3, ex.Position.Line);
                Assert.AreEqual("X", ex.Token);
            }
        }
示例#2
0
        // Checks tokenization of integer and decimal literals, including
        // type suffixes (m, ul, f, l); whitespace becomes its own token,
        // so each two-literal input yields three tokens.
        public void NumericLiterals()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new DecimalLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            // Plain integer and decimal literal.
            Token[] result = tokenizer.Tokenize("10 10.0");

            Assert.AreEqual(3, result.Length);
            Assert.AreEqual("10", result[0].Text);
            Assert.AreEqual("10.0", result[2].Text);

            // Decimal and unsigned-long suffixes.
            result = tokenizer.Tokenize("10m 10ul");

            Assert.AreEqual(3, result.Length);
            Assert.AreEqual("10m", result[0].Text);
            Assert.AreEqual("10ul", result[2].Text);

            // Float and long suffixes.
            result = tokenizer.Tokenize("10f 10l");

            Assert.AreEqual(3, result.Length);
            Assert.AreEqual("10f", result[0].Text);
            Assert.AreEqual("10l", result[2].Text);
        }
        // Checks tokenization of integer and decimal literals, including
        // type suffixes (m, ul, f, l); whitespace becomes its own token.
        public void NumericLiterals()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new DecimalLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            Token[] tokens;

            tokens = tokenizer.Tokenize("10 10.0");

            // fix: normalized argument spacing to match the sibling copy of this test
            Assert.AreEqual(3, tokens.Length);
            Assert.AreEqual("10", tokens[0].Text);
            Assert.AreEqual("10.0", tokens[2].Text);

            tokens = tokenizer.Tokenize("10m 10ul");

            Assert.AreEqual(3, tokens.Length);
            Assert.AreEqual("10m", tokens[0].Text);
            Assert.AreEqual("10ul", tokens[2].Text);

            tokens = tokenizer.Tokenize("10f 10l");

            Assert.AreEqual(3, tokens.Length);
            Assert.AreEqual("10f", tokens[0].Text);
            Assert.AreEqual("10l", tokens[2].Text);
        }
示例#4
0
        // Verifies that UnknownTokenException carries the correct 1-based
        // line/column position and the offending token text.
        public void BadTokenPosition()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            // Single-line input: "A" is unmatched at column 3 of line 1.
            try
            {
                tokenizer.Tokenize("5 A");

                Assert.Fail(); // fix: without this the test silently passes when no exception is thrown
            }
            catch (UnknownTokenException ex)
            {
                Assert.AreEqual(3, ex.Position.Column);
                Assert.AreEqual(1, ex.Position.Line);
                Assert.AreEqual("A", ex.Token);
            }

            // Multi-line (CRLF) input: "X" is unmatched at column 4 of line 3.
            try
            {
                tokenizer.Tokenize("5 4\r\n2\r\n   X\r\n5");

                Assert.Fail(); // fix: guard against the no-throw case here as well
            }
            catch (UnknownTokenException ex)
            {
                Assert.AreEqual(4, ex.Position.Column);
                Assert.AreEqual(3, ex.Position.Line);
                Assert.AreEqual("X", ex.Token); // fix: normalized argument spacing
            }
        }
示例#5
0
        // Verifies that tokenizing input with a character no matcher accepts
        // ("A") raises UnknownTokenException. Uses the explicit try/Assert.Fail
        // pattern used by the other BadToken variants in this file, so the
        // test fails loudly if no exception is thrown.
        public void BadToken()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            try
            {
                tokenizer.Tokenize("5 A");

                Assert.Fail(); // fix: reaching here means the expected exception was not thrown
            }
            catch (UnknownTokenException)
            {
                // Expected: catching the exception is the assertion.
            }
        }
示例#6
0
        // Verifies that tokenizing input with a character no matcher accepts
        // ("A") raises UnknownTokenException. Uses the explicit try/Assert.Fail
        // pattern used by the other BadToken variants in this file, so the
        // test fails loudly if no exception is thrown.
        public void BadToken()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            try
            {
                tokenizer.Tokenize("5 A");

                Assert.Fail(); // fix: reaching here means the expected exception was not thrown
            }
            catch (UnknownTokenException)
            {
                // Expected: catching the exception is the assertion.
            }
        }
示例#7
0
        // Exercises StartsAndEndsWithMatcher with "<!--" / "-->" delimiters;
        // the '"' argument presumably quotes an embedded terminator so it
        // does not end the token early (confirmed by the second input below).
        public void StartsAndEndsWithToken()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new StartsAndEndsWithMatcher("<!--", "-->", '"'));
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            // Two comment tokens separated by one whitespace token.
            Token[] parsed = tokenizer.Tokenize("<!--test-->  <!-- test 2 -->");

            Assert.AreEqual(3, parsed.Length);

            // The quoted "-->" inside the first comment must not terminate it,
            // so the token count stays the same.
            parsed = tokenizer.Tokenize("<!--test \"-->\"-->  <!-- test 2 -->");

            Assert.AreEqual(3, parsed.Length);
        }
示例#8
0
        // Verifies that tokenizing input with a character no matcher accepts
        // ("A") raises UnknownTokenException; Assert.Fail guards the no-throw
        // case. (Removed the commented-out ExpectedException attribute —
        // this try/catch pattern replaces it.)
        public void BadToken()
        {
            try
            {
                Tokenizer tokenizer = new Tokenizer();

                tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
                tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

                tokenizer.Tokenize("5 A");

                Assert.Fail();
            }
            catch (UnknownTokenException) // fix: dropped unused 'ex' variable (CS0168 warning)
            {
                // Expected: catching the exception is the assertion.
            }
        }
示例#9
0
        // Verifies that tokenizing input with a character no matcher accepts
        // ("A") raises UnknownTokenException; Assert.Fail guards the no-throw
        // case.
        public void BadToken()
        {
            try
            {
                Tokenizer tokenizer = new Tokenizer();

                tokenizer.AddTokenMatcher(new IntegerLiteralMatcher());
                tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

                tokenizer.Tokenize("5 A");

                Assert.Fail();
            }
            catch (UnknownTokenException) // fix: dropped unused 'ex' variable (CS0168 warning)
            {
                // Expected: catching the exception is the assertion.
            }
        }
示例#10
0
        // Registers overlapping matchers and tokenizes "(test)(x)"; the
        // expected count of 4 shows how the tokenizer resolves the overlap
        // between the StringMatcher("(test)") and the single-char matchers.
        public void TestFallback()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new CharMatcher('('));
            tokenizer.AddTokenMatcher(new CharMatcher(')'));
            tokenizer.AddTokenMatcher(new StringMatcher("(test)"));
            tokenizer.AddTokenMatcher(new AnyCharMatcher("abcdefghijklmnopqrstuvwxyz"));

            Token[] result = tokenizer.Tokenize("(test)(x)");

            Assert.AreEqual(4, result.Length);
        }
示例#11
0
        // Checks that string literals are matched as single tokens, and that
        // a backslash-escaped quote inside a literal does not end it.
        public void TestStringLiteral()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new StringLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());
            tokenizer.AddTokenMatcher(new CharMatcher('+'));

            // literal, space, '+', space, literal => 5 tokens.
            Token[] result = tokenizer.Tokenize("\"test1\" + \"test2\"");

            Assert.AreEqual(5, result.Length);
            Assert.AreEqual("\"test1\"", result[0].Text);
            Assert.AreEqual("\"test2\"", result[4].Text);

            // The escaped quote (\") stays inside the second literal.
            result = tokenizer.Tokenize("\"test1\" + \"test\\\"2\"");

            Assert.AreEqual(5, result.Length);
            Assert.AreEqual("\"test1\"", result[0].Text);
            Assert.AreEqual("\"test\\\"2\"", result[4].Text);
        }
        // Checks that string literals tokenize as single tokens and that a
        // backslash-escaped quote does not terminate a literal early.
        public void TestStringLiteral()
        {
            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(new StringLiteralMatcher());
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());
            tokenizer.AddTokenMatcher(new CharMatcher('+'));

            // Two plain literals joined by '+': 5 tokens in total.
            Token[] tokenized = tokenizer.Tokenize("\"test1\" + \"test2\"");

            Assert.AreEqual(5, tokenized.Length);
            Assert.AreEqual("\"test1\"", tokenized[0].Text);
            Assert.AreEqual("\"test2\"", tokenized[4].Text);

            // Second literal contains an escaped quote and must stay intact.
            tokenized = tokenizer.Tokenize("\"test1\" + \"test\\\"2\"");

            Assert.AreEqual(5, tokenized.Length);
            Assert.AreEqual("\"test1\"", tokenized[0].Text);
            Assert.AreEqual("\"test\\\"2\"", tokenized[4].Text);
        }
示例#13
0
        // Checks identifier-style tokenization: a CompositeMatcher that pairs
        // a leading letter/underscore with a trailing alphanumeric sequence,
        // splitting "Math.Max" into identifier / '.' / identifier.
        public void TestAnySequence()
        {
            ITokenMatcher matcher = new CompositeMatcher(
                new AnyCharMatcher("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"),
                new SequenceOfAnyCharMatcher("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789")
                );

            // fix: removed unused local 'alphaMatcher' (never registered or referenced)

            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(matcher);
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());
            tokenizer.AddTokenMatcher(new CharMatcher('.'));

            Token[] tokens = tokenizer.Tokenize("Math.Max");

            Assert.AreEqual(3, tokens.Length);
            Assert.AreEqual("Math", tokens[0].Text);
            Assert.AreEqual(".", tokens[1].Text);
            Assert.AreEqual("Max", tokens[2].Text);
        }
        // Registers overlapping matchers and tokenizes "(test)(x)"; the
        // expected count of 4 shows how the tokenizer resolves the overlap
        // between the StringMatcher("(test)") and the single-char matchers.
        public void TestFallback()
        {
            ITokenMatcher matcher1 = new CharMatcher('(');
            ITokenMatcher matcher2 = new CharMatcher(')');
            ITokenMatcher matcher3 = new StringMatcher("(test)");
            ITokenMatcher matcher4 = new AnyCharMatcher("abcdefghijklmnopqrstuvwxyz");

            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(matcher1);
            tokenizer.AddTokenMatcher(matcher2);
            tokenizer.AddTokenMatcher(matcher3);
            tokenizer.AddTokenMatcher(matcher4);

            Token[] tokens = tokenizer.Tokenize("(test)(x)");

            // fix: normalized argument spacing and dropped stray blank line
            // to match the other copy of this test in the file
            Assert.AreEqual(4, tokens.Length);
        }
        // Exercises StartsAndEndsWithMatcher with "<!--" / "-->" delimiters;
        // the second input shows that a '"'-quoted "-->" inside a token does
        // not terminate it early.
        public void StartsAndEndsWithToken()
        {
            Tokenizer tokenizer = new Tokenizer();

            // fix: normalized argument spacing to match the sibling copy of this test
            tokenizer.AddTokenMatcher(new StartsAndEndsWithMatcher("<!--", "-->", '"'));
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());

            Token[] tokens;

            tokens = tokenizer.Tokenize("<!--test-->  <!-- test 2 -->");

            Assert.AreEqual(3, tokens.Length);

            tokens = tokenizer.Tokenize("<!--test \"-->\"-->  <!-- test 2 -->");

            Assert.AreEqual(3, tokens.Length);
        }
        // Checks identifier-style tokenization: a CompositeMatcher that pairs
        // a leading letter/underscore with a trailing alphanumeric sequence,
        // splitting "Math.Max" into identifier / '.' / identifier.
        public void TestAnySequence()
        {
            ITokenMatcher matcher = new CompositeMatcher(
                new AnyCharMatcher("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"),
                new SequenceOfAnyCharMatcher("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789")
                );

            // fix: removed unused local 'alphaMatcher' (never registered or referenced)

            Tokenizer tokenizer = new Tokenizer();

            tokenizer.AddTokenMatcher(matcher);
            tokenizer.AddTokenMatcher(new WhiteSpaceMatcher());
            tokenizer.AddTokenMatcher(new CharMatcher('.'));

            Token[] tokens = tokenizer.Tokenize("Math.Max");

            // fix: normalized argument spacing and trimmed trailing blank lines
            Assert.AreEqual(3, tokens.Length);
            Assert.AreEqual("Math", tokens[0].Text);
            Assert.AreEqual(".", tokens[1].Text);
            Assert.AreEqual("Max", tokens[2].Text);
        }