Example #1
        public void FirstTest()
        {
            var luaFile = TestUtils.GetTestPath(@"lua-5.2.1-tests\literals.lua");

            var engine = Lua.CreateEngine();
            var context = Lua.GetLuaContext(engine);
            var unit = context.CreateFileUnit(luaFile);
            var reader = TestUtils.OpenReaderOrIgnoreTest(unit.GetReader);
            Console.WriteLine("Reading data from {0}", new Uri(luaFile));

            var tokenizer = new Tokenizer(ErrorSink.Default, new LuaCompilerOptions() { SkipFirstLine = true });
            tokenizer.Initialize(null, reader, unit, SourceLocation.MinValue);

            var fname = Path.Combine(Path.GetTempPath(), "tokenizer.txt");
            using (var fout = File.CreateText(fname))
            {
                foreach (var token in tokenizer.EnumerateTokens().TakeWhile(t => t.Symbol != Symbol.Eof))
                {
                    if (token.Symbol == Symbol.Whitespace)
                        continue;
                    if (token.Symbol == Symbol.EndOfLine)
                        continue;

                    fout.Write("{0,-12}", token.Symbol);
                    fout.Write("{0,-10}", token.Span.Start);
                    fout.Write("{0,-10}", token.Span.End);
                    fout.Write("{0}", token.Lexeme);

                    fout.WriteLine();
                }
            }
            Console.WriteLine("Written results to {0}", new Uri(fname));
        }
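Examples #1, #7 and #8 all stop enumeration with TakeWhile(t => t.Symbol != Symbol.Eof). Worth keeping in mind for the token counts: TakeWhile yields elements only until the predicate first fails, so the Eof token itself never appears in the results. A two-line illustration in plain LINQ, with no project-specific assumptions:

        // TakeWhile stops at the first failing element; the sentinel is dropped.
        var numbers = new[] { 1, 2, 3, 0, 4 };
        var prefix = numbers.TakeWhile(n => n != 0).ToArray(); // { 1, 2, 3 }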
Example #2
        public void TestBlock()
        {
            const string input =
                "FBXHeaderExtension:  {\n" +
                "    ; This is a comment \n" +
                "    FBXHeaderVersion: 1003\n" +
                "    FBXVersion: 6100\n" +
                "    Creator: \"FBX SDK/FBX Plugins version 2010.2\"\n" +
                "    ; This is another comment \n" +
                "}\n";
            var tokens = new Token[] {
                new Token(TokenType.Name, "FBXHeaderExtension"),
                new Token(TokenType.Colon, ":"),
                new Token(TokenType.OpenBrace, "{"),
                new Token(TokenType.Name, "FBXHeaderVersion"),
                new Token(TokenType.Colon, ":"),
                new Token(TokenType.Number, "1003"),
                new Token(TokenType.Name, "FBXVersion"),
                new Token(TokenType.Colon, ":"),
                new Token(TokenType.Number, "6100"),
                new Token(TokenType.Name, "Creator"),
                new Token(TokenType.Colon, ":"),
                new Token(TokenType.String, "\"FBX SDK/FBX Plugins version 2010.2\""),
                new Token(TokenType.CloseBrace, "}"),
            };
            var tokenizer = new Tokenizer(input);

            // when
            var list = tokenizer.EnumerateTokens().ToList();

            // then
            CollectionAssert.AreEqual(tokens, list);
        }
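CollectionAssert.AreEqual compares the two sequences element by element with Equals, so this test only passes if Token implements value equality over its type and text. The project's actual Token definition is not shown in these examples; below is a minimal sketch with the equality semantics the assertion relies on (the property names Type and Text are assumptions):

        public sealed class Token : IEquatable<Token>
        {
            public Token(TokenType type, string text) { Type = type; Text = text; }

            public TokenType Type { get; }
            public string Text { get; }

            public bool Equals(Token other) =>
                other != null && Type == other.Type && Text == other.Text;

            public override bool Equals(object obj) => Equals(obj as Token);
            public override int GetHashCode() => (Type, Text).GetHashCode();
            public override string ToString() => $"{Type} '{Text}'"; // clearer failure messages
        }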
Example #3
        public void TokenSequenceIsParsed(string value)
        {
            var expected = value
                           .Trim()
                           .Split(new[] { " ", "\u00a0" }, StringSplitOptions.RemoveEmptyEntries);

            var result = Tokenizer
                         .EnumerateTokens(value)
                         .Select(token => token.Value.Substring(token.Offset.Index, token.Offset.Count))
                         .ToArray();

            result.Should().ContainInOrder(expected);
        }
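The expected array in Example #3 is produced by splitting on both a regular space and a no-break space (U+00A0), so the tokenizer under test is evidently expected to treat NBSP as a separator too. The splitting step itself is plain .NET:

        // Both separators are honored; RemoveEmptyEntries drops the blanks.
        var parts = " one\u00a0two three ".Trim()
                    .Split(new[] { " ", "\u00a0" }, StringSplitOptions.RemoveEmptyEntries);
        // parts: { "one", "two", "three" }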
Example #4
        public void TestIgnoreWhitespace()
        {
            // given
            const string input     = " name 123 \"one two three\" ";
            var          tokenizer = new Tokenizer(input);

            // when
            var list = tokenizer.EnumerateTokens().ToList();

            // then
            Assert.AreEqual(3, list.Count);
            Assert.AreEqual(new Token(TokenType.Name, "name"), list[0]);
            Assert.AreEqual(new Token(TokenType.Number, "123"), list[1]);
            Assert.AreEqual(new Token(TokenType.String, "\"one two three\""), list[2]);
        }
Example #5
        public void TestEnumerateTokens()
        {
            // given
            const string input     = "***";
            var          token     = new Token(TokenType.Star, "*");
            var          tokenizer = new Tokenizer(input);

            // when
            var list = tokenizer.EnumerateTokens().ToList();

            // then
            Assert.AreEqual(3, list.Count);
            Assert.AreEqual(token, list[0]);
            Assert.AreEqual(token, list[1]);
            Assert.AreEqual(token, list[2]);
        }
Example #6
        public static double Run(string value)
        {
            var locals = new Stack<Operand>();

            foreach (var token in Tokenizer.EnumerateTokens(value))
            {
                var span = token.Value.AsSpan(token.Offset.Index, token.Offset.Count);

                if (token.Offset.Count == 1 && _operationMap.TryGetValue(span[0], out var operation))
                {
                    operation.Apply(locals);
                }
                else if (double.TryParse(span, out var numeric))
                {
                    locals.Push(new Operand(numeric));
                }
                else
                {
                    throw new InvalidExpressionFormatException(token.Value);
                }
            }

            return locals.Count == 1 ? locals.Pop().Value : throw new InvalidExpressionFormatException(value);
        }
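A quick sanity check of the evaluator above, as a hypothetical call (it assumes _operationMap registers at least '+' and '*' as binary stack operations and that Tokenizer.EnumerateTokens splits on whitespace): the input pushes 3 and 4, '+' replaces them with 7, 2 is pushed, '*' replaces 7 and 2 with 14, and 14 is the single operand left on the stack.

        // Hypothetical usage; "3 4 + 2 *" is postfix notation for (3 + 4) * 2.
        double result = Run("3 4 + 2 *");
        Console.WriteLine(result); // expected: 14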
Example #7
        public void RunLexerOnLuaTestSuiteFile(string luaFile, bool useLua52)
        {
            var options = new LuaCompilerOptions()
            {
                SkipFirstLine = true,
                UseLua52Features = useLua52,
            };

            var engine = Lua.CreateEngine();
            var context = Lua.GetLuaContext(engine);
            var unit = context.CreateFileUnit(luaFile);
            var reader = TestUtils.OpenReaderOrIgnoreTest(unit.GetReader);

            var tokenizer = new Tokenizer(ErrorSink.Default, options);

            var sw = Stopwatch.StartNew();

            tokenizer.Initialize(null, reader, unit, SourceLocation.MinValue);
            int counter = tokenizer.EnumerateTokens()
                                   .TakeWhile(t => t.Symbol != Symbol.Eof)
                                   .Count();

            sw.Stop();
            Console.WriteLine("Tokenizer run: {0} ms, {1} tokens", sw.ElapsedMilliseconds, counter);
        }
Example #8
        public void LexerErrorReportTests(string snippet, string expect)
        {
            bool mustFail = TestContext.CurrentContext.Test.Properties.Contains("FailureCase");

            var tokenizer = new Tokenizer(ErrorSink.Default, new LuaCompilerOptions() { SkipFirstLine = true });

            var sourceUnit = engine.GetLuaContext().CreateSnippet(snippet, SourceCodeKind.Expression);

            tokenizer.Initialize(null, sourceUnit.GetReader(), sourceUnit, SourceLocation.MinValue);
            try
            {
                var unused = tokenizer.EnumerateTokens(s => true) // all tokens
                                      .TakeWhile(t => t.Symbol != Symbol.Eof)
                                      .Last();
                if (mustFail)
                    Assert.Fail("Expected a SyntaxErrorException");
            }
            catch (SyntaxErrorException ex)
            {
                Assert.That(ex.Message, Is.EqualTo(expect));
            }
        }
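Example #8 is the only one that passes a predicate to EnumerateTokens; with s => true every token is surfaced. The real overload is not shown anywhere in these examples, so the following is only one plausible shape, assuming the predicate does nothing more than select which tokens are yielded:

        // Illustrative sketch, not the library's actual signature:
        public IEnumerable<Token> EnumerateTokens(Func<Token, bool> include)
        {
            foreach (var token in EnumerateTokens())
                if (include(token))
                    yield return token;
        }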
Example #9
        public void TokenSequenceIsEmptyWhenTextContainsNoTokens(string value)
            => Tokenizer
               .EnumerateTokens(value)
               .ToArray()
               .Should()
               .HaveCount(0);
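Examples #3, #6 and #9 use a second, static API: Tokenizer.EnumerateTokens(string) yields tokens that carry the original text (token.Value) together with an offset into it (token.Offset.Index and token.Offset.Count). Those member names come straight from the calls above; everything else in this sketch, including the type names and the whitespace-splitting behavior, is an assumption for illustration (System.Collections.Generic is the only using it needs):

        // Placeholder types; only the member names mirror the examples.
        public readonly struct TokenOffset
        {
            public TokenOffset(int index, int count) { Index = index; Count = count; }
            public int Index { get; }
            public int Count { get; }
        }

        public readonly struct StringToken
        {
            public StringToken(string value, TokenOffset offset) { Value = value; Offset = offset; }
            public string Value { get; }       // the full input string
            public TokenOffset Offset { get; } // the slice of Value this token covers
        }

        public static class Tokenizer
        {
            // Assumed behavior: one token per run of non-whitespace characters
            // (char.IsWhiteSpace also covers the no-break space from Example #3).
            public static IEnumerable<StringToken> EnumerateTokens(string value)
            {
                int start = -1;
                for (int i = 0; i <= value.Length; i++)
                {
                    bool separator = i == value.Length || char.IsWhiteSpace(value[i]);
                    if (!separator && start < 0)
                        start = i;          // a token begins
                    else if (separator && start >= 0)
                    {
                        yield return new StringToken(value, new TokenOffset(start, i - start));
                        start = -1;         // the token ends
                    }
                }
            }
        }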