Example #1
        public void SimpleParseTest()
        {
            var codes = new[] {
                @"<?php
class X {
    function static() { }
}",
                @"<?php
class enum extends A {
}",
                @"<?php
A::E->foo(); // dereferencable class const
",
            };

            foreach (var code in codes)
            {
                var sourceUnit = new CodeSourceUnit(code, "dummy.php", Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Basic);
                var factory    = new BasicNodesFactory(sourceUnit);
                var errors     = new TestErrorSink();

                sourceUnit.Parse(factory, errors, new TestErrorRecovery());

                Assert.IsNotNull(sourceUnit.Ast);
            }
        }
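
These tests depend on a few helper types that are not part of the excerpt (TestErrorSink, TestErrorRecovery). As a rough idea of the surface they need to provide, here is a minimal error-sink sketch, assuming the parser reports errors through an IErrorSink<Span>-style callback; the real TestErrorSink in the test project may look different.

        // Minimal sketch only (assumed shape, not the project's actual implementation).
        // It records every reported error so assertions can read errors.Count,
        // errors.Errors[i].Error.Id and errors.Errors[i].ToString().
        public class TestErrorSink : IErrorSink<Span>
        {
            public class ErrorInstance
            {
                public Span Span;
                public ErrorInfo Error;
                public string[] Args;

                public override string ToString() => $"{Error.Id}: {string.Join(", ", Args)}";
            }

            public readonly List<ErrorInstance> Errors = new List<ErrorInstance>();

            public int Count => Errors.Count;

            public void Error(Span span, ErrorInfo info, params string[] argsOpt)
                => Errors.Add(new ErrorInstance { Span = span, Error = info, Args = argsOpt });
        }
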
Example #2
        public void LexerGetNextTokenTest()
        {
            string path = (string)TestContext.DataRow["files"];

            TestErrorSink errorSink = new TestErrorSink();
            Lexer         lexer     = new Lexer(new StreamReader(path), Encoding.UTF8, errorSink,
                                                LanguageFeatures.ShortOpenTags, 0, Lexer.LexicalStates.INITIAL);

            string parsed = ParseByPhp(path);

            parsed = parsed.Substring(0, parsed.LastIndexOf('-'));
            parsed = Regex.Replace(parsed.Replace("\r", " ").Replace("\n", " "), @"\s+", " ");
            int i = 0;

            string[][] expectedTokens = (
                from s in parsed.Split('-')
                let num = i++
                group s by num / 3 into g
                select g.ToArray()
            ).ToArray();

            //List<KeyValuePair<Tokens, string>> l = new List<KeyValuePair<Tokens, string>>();
            //Tokens t = Tokens.END;
            //while ((t = (Tokens)lexer.GetNextToken()) != Tokens.END)
            //{
            //    l.Add(new KeyValuePair<Tokens, string>(t, lexer.TokenText));
            //}

            foreach (var expectedToken in expectedTokens)
            {
                Tokens token = (Tokens)lexer.GetNextToken();
                Assert.AreEqual(int.Parse(expectedToken[0]), (int)token, path);
                if (token == Tokens.T_VARIABLE || token == Tokens.T_STRING || token == Tokens.T_END_HEREDOC)
                {
                    Assert.AreEqual(expectedToken[2].TrimStart('$'), lexer.TokenValue.Object.ToString());
                }
                if (token == Tokens.T_DNUMBER)
                {
                    Assert.AreEqual(double.Parse(expectedToken[2], System.Globalization.NumberFormatInfo.InvariantInfo), lexer.TokenValue.Double);
                }
                if (token == Tokens.T_LNUMBER)
                {
                    Assert.AreEqual(int.Parse(expectedToken[2]), lexer.TokenValue.Integer);
                }
                //lexer.RestoreCompressedState(lexer.GetCompressedState());
            }
            Assert.AreEqual(Tokens.EOF, lexer.GetNextToken(), path);
            Assert.AreEqual(Tokens.EOF, lexer.GetNextToken(), path);
            Assert.AreEqual(Tokens.EOF, lexer.GetNextToken(), path);
            Assert.AreEqual(0, errorSink.Errors.Count);
        }
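
The query above uses the side-effecting counter i to chunk the flat, dash-separated dump returned by ParseByPhp (not shown) into groups of three strings; the assertions then read the token id from index 0 and the token text from index 2 of each group. A standalone illustration of the same chunking, with made-up input:

        // Illustration only - the input string is invented.
        string dump = "10-20-30-40-50-60-70";
        int i = 0;
        string[][] groups = (
            from s in dump.Split('-')
            let num = i++
            group s by num / 3 into g
            select g.ToArray()
        ).ToArray();
        // groups: { "10", "20", "30" }, { "40", "50", "60" }, { "70" }

Note that the result depends on the query being enumerated exactly once, because i is mutated from inside it.
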
Example #3
        public void SimpleParseTest()
        {
            string code = @"<?php
class X {
    function static() { }
}
";

            var sourceUnit = new CodeSourceUnit(code, "dummy.php", Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Basic);
            var factory    = new BasicNodesFactory(sourceUnit);
            var errors     = new TestErrorSink();

            sourceUnit.Parse(factory, errors, new TestErrorRecovery());
        }
Example #4
        public void TokensVisitorTest()
        {
            string path = (string)TestContext.DataRow["files"];

            if (path.Contains("functions1.phpt"))
            {
                return; // TODO - too slow test
            }
            string testcontent = File.ReadAllText(path);
            var    original    = testcontent;
            var    sourceUnit  = new TestSourceUnit(original, path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Php71Set);
            var    factory     = new BasicNodesFactory(sourceUnit);
            var    errors      = new TestErrorSink();

            GlobalCode ast = null;

            sourceUnit.Parse(factory, errors, new TestErrorRecovery());
            ast = sourceUnit.Ast;
            if (errors.Count != 0)
            {
                return; // AST is null or invalid
            }

            var provider = SourceTokenProviderFactory.CreateProvider(sourceUnit.SourceLexer.AllTokens, original);
            var composer = new WhitespaceComposer(provider);
            var visitor  = new TokenVisitor(new TreeContext(ast), composer, provider);

            visitor.VisitElement(ast);
            var code = composer.Code;

            var result = code.ToString();

            //File.WriteAllText(Path.Combine(Directory.GetParent(path).FullName, "original.txt"), original);
            //File.WriteAllText(Path.Combine(Directory.GetParent(path).FullName, "result.txt"), result);
            //Assert.AreEqual(original.Length, result.Length);
            //for (int i = 0; i < original.Length; i++)
            //{
            //    Assert.AreEqual(original[i], result[i]);
            //}
            Assert.AreEqual(original, result);
            var tokens = provider.GetTokens(new Span(0, original.Length)).AsArray();

            Assert.AreEqual(tokens.Length, composer.Processed.Count);
            for (int i = 0; i < tokens.Length; i++)
            {
                Assert.AreEqual(tokens[i].Token, composer.Processed[i].Token);
                Assert.AreEqual(tokens[i].Span, composer.Processed[i].Span);
            }
        }
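
When the round-trip assertion Assert.AreEqual(original, result) fails, the commented-out per-character loop above is the usual way to locate the first divergence. A small stand-alone helper (not part of the test project) doing the same:

        // Returns the index of the first differing character, or -1 when the strings are equal.
        static int FirstDifference(string expected, string actual)
        {
            int n = Math.Min(expected.Length, actual.Length);
            for (int i = 0; i < n; i++)
            {
                if (expected[i] != actual[i]) return i;
            }
            return expected.Length == actual.Length ? -1 : n;
        }
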
Example #5
        public void MatchTest()
        {
            var codes = new[] {
                @"<?php echo match($x) { 0 => 'hello', 1, 2, 3 => 'world', default => '!', };",
            };

            foreach (var code in codes)
            {
                var errors = new TestErrorSink();
                var unit   = new CodeSourceUnit(code, "dummy.php", Encoding.UTF8, features: LanguageFeatures.Php80Set);
                unit.Parse(new BasicNodesFactory(unit), errors);

                Assert.AreEqual(0, errors.Count);
            }
        }
Example #6
        public void InstanceOfTest()
        {
            var codes = new[] {
                @"<?php echo $x instanceof ((string)$y);",
                @"<?php echo $x instanceof $y[0];",
            };

            foreach (var code in codes)
            {
                var errors = new TestErrorSink();
                var unit   = new CodeSourceUnit(code, "dummy.php", Encoding.UTF8, features: LanguageFeatures.Php80Set);
                unit.Parse(new BasicNodesFactory(unit), errors);

                Assert.AreEqual(0, errors.Count);
            }
        }
Example #7
        public void LexerGetNextTokenByLineTest()
        {
            string path = (string)TestContext.DataRow["files"];

            TestErrorSink errorSink = new TestErrorSink();
            Lexer         lexer     = new Lexer(new StreamReader(path), Encoding.UTF8, errorSink,
                                                LanguageFeatures.ShortOpenTags, 0, Lexer.LexicalStates.INITIAL);

            // Tokenize the file line by line, resuming each line from the lexical
            // state in which the previous one ended.
            Lexer.LexicalStates previousState = Lexer.LexicalStates.INITIAL;
            foreach (var line in File.ReadAllLines(path))
            {
                lexer.Initialize(new StringReader(line + Environment.NewLine), previousState, true, 0);

                while (lexer.GetNextToken() != Tokens.EOF)
                {
                    Assert.IsTrue(lexer.TokenSpan.IsValid);
                }
                previousState = lexer.CurrentLexicalState;
            }
        }
Example #8
        public void VisitorVisitTests()
        {
            string path        = (string)TestContext.DataRow["files"];
            string testcontent = File.ReadAllText(path);

            string[] testparts = testcontent.Split(new string[] { "<<<TEST>>>" }, StringSplitOptions.RemoveEmptyEntries);
            Assert.IsTrue(testparts.Length >= 2);

            var sourceUnit = new CodeSourceUnit(testparts[0], path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Basic);
            var factory    = new AstCounterFactory(sourceUnit);
            var errors     = new TestErrorSink();

            GlobalCode ast = null;

            using (StringReader source_reader = new StringReader(testparts[0]))
            {
                sourceUnit.Parse(factory, errors, new TestErrorRecovery());
                ast = sourceUnit.Ast;
            }
            if (testparts[1].TrimStart().StartsWith(ParserTests.Errors))
            {
                var matches     = _errorRegex.Matches(testparts[1]);
                var knownErrors = matches[0].Groups["Number"].Value.Split(',');
                Assert.AreEqual(1, matches.Count, path);
                Assert.AreEqual(knownErrors.Length, errors.Count, path);
            }
            else
            {
                Assert.AreEqual(0, errors.Count, path);
                Assert.IsNotNull(ast);

                // check every node has a parent
                var checker = new TreeVisitorCheck();
                checker.VisitElement(ast);
                Assert.AreEqual(factory.CreatedElements.Count, checker.VisitedElements.Count, path);
                Assert.AreEqual(factory.ItemCount, checker.ItemCount, path);
                Assert.AreEqual(factory.ForeachVarCount, checker.ForeachVarCount, path);
                //var dictionary = factory.CreatedElements.GroupBy(t => t.GetType()).ToDictionary(g => g.Key);
            }
        }
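
TreeVisitorCheck is not included in the excerpt. Judging from its use here, it walks the whole tree, records every visited node, and verifies parent links; a rough sketch of that idea, assuming the parser's TreeVisitor base class and a ContainingElement parent property (both assumptions), with the ItemCount and ForeachVarCount counters omitted:

        // Sketch only - the real TreeVisitorCheck in the test project is richer.
        class TreeVisitorCheck : TreeVisitor
        {
            public readonly List<LangElement> VisitedElements = new List<LangElement>();

            public override void VisitElement(LangElement element)
            {
                if (element != null)
                {
                    VisitedElements.Add(element);

                    // every node except the root is expected to know its parent
                    Debug.Assert(element is GlobalCode || element.ContainingElement != null);
                }

                base.VisitElement(element);   // continue into child nodes
            }
        }
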
Example #9
        public void VisitorVisitTests()
        {
            string path        = (string)TestContext.DataRow["files"];
            string testcontent = File.ReadAllText(path);

            string[] testparts = testcontent.Split(new string[] { "<<<TEST>>>" }, StringSplitOptions.RemoveEmptyEntries);
            Assert.IsTrue(testparts.Length >= 2);

            var sourceUnit = new CodeSourceUnit(testparts[0], path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Basic);
            var factory    = new AstCounterFactory(sourceUnit);
            var errors     = new TestErrorSink();

            bool expectErrors = testparts[1].TrimStart().StartsWith(ParserTests.Errors);

            GlobalCode ast = null;

            Parser parser = new Parser();

            using (StringReader source_reader = new StringReader(testparts[0]))
            {
                sourceUnit.Parse(factory, errors);
                ast = sourceUnit.Ast;
            }
            if (expectErrors)
            {
                Assert.AreEqual(1, errors.Count, path);
            }
            else
            {
                Assert.AreEqual(0, errors.Count, path);

                // check every node has a parent
                var checker = new TreeVisitorCheck();
                checker.VisitElement(ast);
                Assert.AreEqual(factory.CreatedElements.Count, checker.VisitedElements.Count, path);
                Assert.AreEqual(factory.ItemCount, checker.ItemCount, path);
                Assert.AreEqual(factory.ForeachVarCount, checker.ForeachVarCount, path);
            }
        }
Example #10
        public void LexerStringsTest()
        {
            TestErrorSink errorSink = new TestErrorSink();
            Lexer         lexer     = new Lexer(new StringReader("\"\""), Encoding.UTF8, errorSink,
                                                LanguageFeatures.ShortOpenTags, 0, Lexer.LexicalStates.INITIAL);

            var charSet = new[] { new [] { '$', '{', 'n', '\0', '\r', '\n', ' ' },
                                  new [] { '\'', '\\', 'x', 'c', '"', '`', '8', '0' },
                                  new [] { '/', '*', '?', '>', ';' } };

            int[]  word = new int[5];
            char[] text = new char[word.Length];

            var states = new Lexer.LexicalStates[] { Lexer.LexicalStates.ST_DOUBLE_QUOTES, Lexer.LexicalStates.ST_SINGLE_QUOTES,
                                                     Lexer.LexicalStates.ST_BACKQUOTE, Lexer.LexicalStates.ST_HEREDOC, Lexer.LexicalStates.ST_NOWDOC, Lexer.LexicalStates.ST_COMMENT,
                                                     Lexer.LexicalStates.ST_DOC_COMMENT, Lexer.LexicalStates.INITIAL, Lexer.LexicalStates.ST_IN_SCRIPTING };

            // Enumerate every 5-character combination over each alphabet and lex it in
            // every listed state, checking that token spans stay valid and lexing terminates.
            foreach (var chars in charSet)
            {
                foreach (var state in states)
                {
                    while (Increment(word, chars.Length))
                    {
                        ToArray(word, text, chars);
                        string line = new string(text);
                        lexer.Initialize(new StringReader(line), state, true, 0);
                        Tokens token = Tokens.EOF;
                        int    count = 0;
                        while ((token = lexer.GetNextToken()) != Tokens.EOF && count++ < 100)
                        {
                            Assert.IsTrue(lexer.TokenSpan.IsValid, line);
                            Assert.IsTrue(lexer.TokenSpan.Length >= 0, line + " - " + state.ToString() + " - " + lexer.TokenSpan.Start.ToString());
                        }
                        Assert.IsTrue(count < 100, line);
                    }
                }
            }
        }
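
Increment and ToArray are not included in the excerpt. Judging from their call sites, Increment advances the word array like an odometer in base chars.Length and returns false once every combination has been produced, while ToArray maps the digits to characters; a sketch under that assumption:

        // Sketch of the assumed helpers; the originals are not shown in the excerpt.
        static bool Increment(int[] word, int count)
        {
            for (int i = 0; i < word.Length; i++)
            {
                if (++word[i] < count)
                {
                    return true;    // no carry - a new combination was produced
                }
                word[i] = 0;        // carry over to the next position
            }
            return false;           // wrapped around - all combinations enumerated
        }

        static void ToArray(int[] word, char[] text, char[] chars)
        {
            for (int i = 0; i < word.Length; i++)
            {
                text[i] = chars[word[i]];
            }
        }
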
Example #11
        public void ParserParseTest()
        {
            string path        = (string)TestContext.DataRow["files"];
            string testcontent = File.ReadAllText(path);

            string[] testparts = testcontent.Split(new string[] { "<<<TEST>>>" }, StringSplitOptions.RemoveEmptyEntries);
            Assert.IsTrue(testparts.Length >= 2);

            var sourceUnit = new CodeSourceUnit(testparts[0], path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Basic);
            var factory    = new BasicNodesFactory(sourceUnit);
            var errors     = new TestErrorSink();

            //
            sourceUnit.Parse(factory, errors, new TestErrorRecovery());

            //
            if (testparts[1].TrimStart().StartsWith(Errors))
            {
                var matches     = _errorRegex.Matches(testparts[1]);
                var knownErrors = matches[0].Groups["Number"].Value.Split(',');
                Assert.AreEqual(1, matches.Count, path);
                Assert.AreEqual(knownErrors.Length, errors.Count, path);
                int errorid = 0;
                for (int i = 0; i < knownErrors.Length; i++)
                {
                    Assert.IsTrue(int.TryParse(knownErrors[i], out errorid), path);
                    Assert.AreEqual(errorid, errors.Errors[i].Error.Id, path);
                    Assert.IsNotNull(errors.Errors[i].ToString());
                }
                testparts[1] = matches[0].Groups["JSON"].Value;
            }
            else
            {
                Assert.AreEqual(0, errors.Count, path);
            }

            Assert.IsNotNull(sourceUnit.Ast);

            var            serializer = new JsonNodeWriter();
            TreeSerializer visitor    = new TreeSerializer(serializer);

            sourceUnit.Ast.VisitMe(visitor);

            Regex  rgx      = new Regex(@"""Span""[^}]*},?\s*\n?"); // omit Span for more compact testing (position must be verified separately)
            string expected = rgx.Replace(testparts[1].Trim().Replace("\r", string.Empty).Replace("\n", string.Empty).Replace(" ", string.Empty), string.Empty);
            string actual   = rgx.Replace(serializer.ToString().Replace("\r", string.Empty).Replace("\n", string.Empty).Replace(" ", string.Empty), string.Empty);

            if (testparts[1].Trim() != "<<<IGNORE>>>")
            {
                // IMPORTANT - Uncomment to regenerate test data
                //File.WriteAllText(path, testparts[0] + "\n<<<TEST>>>\n" + rgx.Replace(serializer.ToString(), string.Empty));
                Assert.AreEqual(expected, actual, path);
            }

            // check every node has a parent
            var parentChecker = new ContainingElementCheck();

            parentChecker.VisitGlobalCode(sourceUnit.Ast);

            // check nodes have correct span corresponding to correct source text
            var spanChecker = new NameSpanCheck(testparts[0]);

            spanChecker.VisitGlobalCode(sourceUnit.Ast);
        }
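
For context, each test file consumed here is plain text split on the <<<TEST>>> marker: the PHP source comes first, followed by either the expected JSON-serialized AST, an error specification matched by _errorRegex, or the literal <<<IGNORE>>> to skip the AST comparison. A made-up minimal file that takes the ignore path:

<?php echo 1;
<<<TEST>>>
<<<IGNORE>>>
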
Example #12
        public void EmptyTokensVisitorTest()
        {
            string path = (string)TestContext.DataRow["files"];

            if (path.Contains("functions1.phpt"))
            {
                return; // TODO - too slow test
            }
            string testcontent = File.ReadAllText(path);
            var    original    = testcontent;

            if (original.Contains("namespace\\"))
            {
                return; // TODO - current namespace cannot be decided from AST
            }

            var sourceUnit = new TestSourceUnit(original, path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Php71Set);
            var factory    = new BasicNodesFactory(sourceUnit);
            var errors     = new TestErrorSink();

            sourceUnit.Parse(factory, errors, new TestErrorRecovery());
            GlobalCode ast = sourceUnit.Ast;

            if (errors.Count != 0)
            {
                return; // AST is null or invalid
            }

            var provider = SourceTokenProviderFactory.CreateEmptyProvider();
            var composer = new EmptyComposer(provider);
            var visitor  = new TokenVisitor(new TreeContext(ast), composer, provider);

            visitor.VisitElement(ast);
            var code = composer.Code.ToString();

            var expectedStr = PrepareString(original);
            var actualStr   = PrepareString(code);

            Assert.AreEqual(expectedStr, actualStr);
            var expected = FilterTokens(sourceUnit.SourceLexer.AllTokens);
            var actual   = FilterTokens(composer.Processed);

            Assert.AreEqual(expected.Length, actual.Length);
            for (int i = 0; i < Math.Min(expected.Length, actual.Length); i++)
            {
                // note: a T_SEMI/T_CASE mismatch is not tolerated here - it still falls through to the strict check below
                if (expected[i].Token == Tokens.T_SEMI && actual[i].Token == Tokens.T_CASE)
                {
                }
                // tolerate the source's "and"/"or" keywords coming back as "&&"/"||" after regeneration
                if (expected[i].Token == Tokens.T_LOGICAL_OR && actual[i].Token == Tokens.T_BOOLEAN_OR ||
                    expected[i].Token == Tokens.T_LOGICAL_AND && actual[i].Token == Tokens.T_BOOLEAN_AND)
                {
                }
                else
                {
                    Assert.AreEqual(expected[i].Token, actual[i].Token);
                }
            }


            sourceUnit = new TestSourceUnit(code, path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Php71Set);
            sourceUnit.Parse(factory, errors, new TestErrorRecovery());
            var newAst = sourceUnit.Ast;

            var serializer        = new JsonNodeWriter();
            var serializerVisitor = new TreeSerializer(serializer);

            ast.VisitMe(serializerVisitor);
            expectedStr       = serializer.ToString();
            serializer        = new JsonNodeWriter();
            serializerVisitor = new TreeSerializer(serializer);
            newAst.VisitMe(serializerVisitor);
            actualStr = serializer.ToString();
            Assert.AreEqual(expectedStr, actualStr);
        }
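
PrepareString and FilterTokens are not shown either. Since the EmptyComposer regenerates code without the original whitespace and comments, the comparison only works on normalized inputs, so presumably PrepareString collapses formatting and FilterTokens drops tokens that carry no syntax. A sketch under those assumptions (the element type with Token/Span properties is also assumed):

        // Assumed helpers - the originals are not part of the excerpt.
        static string PrepareString(string code)
        {
            // collapse all whitespace so formatting differences do not matter
            return Regex.Replace(code, @"\s+", string.Empty);
        }

        static ISourceToken[] FilterTokens(IEnumerable<ISourceToken> tokens)
        {
            // keep only tokens that carry syntax; whitespace and comments are regenerated differently
            return tokens.Where(t => t.Token != Tokens.T_WHITESPACE &&
                                     t.Token != Tokens.T_COMMENT &&
                                     t.Token != Tokens.T_DOC_COMMENT)
                         .ToArray();
        }
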