public void ToStringTest()
{
    INodeWriter serializer = new JsonNodeWriter();
    Assert.AreEqual(string.Empty, serializer.ToString());

    serializer.Serialize("hello", null);
    Assert.AreEqual("\"hello\" : {\n}", serializer.ToString());
}
public void EndSerializeTest()
{
    INodeWriter serializer = new JsonNodeWriter();
    serializer.EndSerialize(null);
    Assert.AreEqual("}", serializer.ToString());

    serializer = new JsonNodeWriter();
    Assert.AreEqual(string.Empty, serializer.ToString());
    serializer.EndSerialize(new NodeObj("hello", "world"));
    Assert.AreEqual("\"hello\" : \"world\"\n}", serializer.ToString());
}
public void StartSerializeTest()
{
    INodeWriter serializer = new JsonNodeWriter();
    serializer.StartSerialize("hello", null);
    Assert.AreEqual("\"hello\" : {", serializer.ToString());

    serializer = new JsonNodeWriter();
    serializer.StartSerialize("hello", new NodeObj("hello", "world"));
    Assert.AreEqual("\"hello\" : {\n \"hello\" : \"world\"", serializer.ToString());

    serializer.StartSerialize("world", new NodeObj("hello", "world"));
    Assert.AreEqual("\"hello\" : {\n \"hello\" : \"world\",\n \"world\" : {\n \"hello\" : \"world\"", serializer.ToString());
}
public void ParserParseTest()
{
    string path = (string)TestContext.DataRow["files"];
    string testcontent = File.ReadAllText(path);

    // Test data files contain the PHP source and the expected result separated by a <<<TEST>>> marker.
    string[] testparts = testcontent.Split(new string[] { "<<<TEST>>>" }, StringSplitOptions.RemoveEmptyEntries);
    Assert.IsTrue(testparts.Length >= 2);

    var sourceUnit = new CodeSourceUnit(testparts[0], path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Basic);
    var factory = new BasicNodesFactory(sourceUnit);
    var errors = new TestErrorSink();

    sourceUnit.Parse(factory, errors, new TestErrorRecovery());

    if (testparts[1].TrimStart().StartsWith(Errors))
    {
        // The expected part starts with a header listing the expected error ids, followed by the expected JSON.
        var matches = _errorRegex.Matches(testparts[1]);
        var knownErrors = matches[0].Groups["Number"].Value.Split(',');
        Assert.AreEqual(1, matches.Count, path);
        Assert.AreEqual(knownErrors.Length, errors.Count, path);

        int errorid = 0;
        for (int i = 0; i < knownErrors.Length; i++)
        {
            Assert.IsTrue(int.TryParse(knownErrors[i], out errorid), path);
            Assert.AreEqual(errorid, errors.Errors[i].Error.Id, path);
            Assert.IsNotNull(errors.Errors[i].ToString());
        }

        testparts[1] = matches[0].Groups["JSON"].Value;
    }
    else
    {
        Assert.AreEqual(0, errors.Count, path);
    }

    Assert.IsNotNull(sourceUnit.Ast);

    var serializer = new JsonNodeWriter();
    TreeSerializer visitor = new TreeSerializer(serializer);
    sourceUnit.Ast.VisitMe(visitor);

    Regex rgx = new Regex(@"""Span""[^}]*},?\s*\n?"); // omit Span for more compact testing (position must be verified separately)
    string expected = rgx.Replace(testparts[1].Trim().Replace("\r", string.Empty).Replace("\n", string.Empty).Replace(" ", string.Empty), string.Empty);
    string actual = rgx.Replace(serializer.ToString().Replace("\r", string.Empty).Replace("\n", string.Empty).Replace(" ", string.Empty), string.Empty);

    if (testparts[1].Trim() != "<<<IGNORE>>>")
    {
        // IMPORTANT - Uncomment to regenerate test data
        //File.WriteAllText(path, testparts[0] + "\n<<<TEST>>>\n" + rgx.Replace(serializer.ToString(), string.Empty));
        Assert.AreEqual(expected, actual, path);
    }

    // check every node has a parent
    var parentChecker = new ContainingElementCheck();
    parentChecker.VisitGlobalCode(sourceUnit.Ast);

    // check nodes have correct span corresponding to correct source text
    var spanChecker = new NameSpanCheck(testparts[0]);
    spanChecker.VisitGlobalCode(sourceUnit.Ast);
}
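// A rough sketch of the test-data layout ParserParseTest consumes, inferred only from the code
// above; the exact error-header syntax recognized by _errorRegex is an assumption here:
//
//   <?php ... PHP source under test ...
//   <<<TEST>>>
//   either the expected JSON AST produced by TreeSerializer/JsonNodeWriter,
//   or an error header whose "Number" group lists the comma-separated expected error ids,
//   followed by the expected JSON in the "JSON" group,
//   or the literal <<<IGNORE>>> to skip the JSON comparison.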
public void SerializerTreeVisitorVisitStringLiteralTest()
{
    var serializer = new JsonNodeWriter();
    TreeSerializer visitor = new TreeSerializer(serializer);
    visitor.VisitStringLiteral(new StringLiteral(new Span(0, 10), "hello world"));
    Assert.AreEqual("\"StringLiteral\" : {\n \"Span\" : {\n \"start\" : \"0\",\n \"end\" : \"10\"\n },\n \"Value\" : \"hello world\"\n}", serializer.ToString());
}
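// A minimal sketch (not part of the test suite; the method name is hypothetical) showing how the
// Span-omission regex and whitespace normalization from ParserParseTest collapse the output
// asserted above before comparison. Note that the normalization also strips the space inside
// "hello world".
public void SpanStrippingSketch()
{
    string json = "\"StringLiteral\" : {\n \"Span\" : {\n \"start\" : \"0\",\n \"end\" : \"10\"\n },\n \"Value\" : \"hello world\"\n}";

    var rgx = new Regex(@"""Span""[^}]*},?\s*\n?"); // same pattern as in ParserParseTest
    string compact = json.Replace("\r", string.Empty)
                         .Replace("\n", string.Empty)
                         .Replace(" ", string.Empty);

    // The "Span" object and its trailing comma are removed, leaving only the node name and value.
    Assert.AreEqual("\"StringLiteral\":{\"Value\":\"helloworld\"}", rgx.Replace(compact, string.Empty));
}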
public void EmptyTokensVisitorTest()
{
    string path = (string)TestContext.DataRow["files"];
    if (path.Contains("functions1.phpt"))
    {
        return; // TODO - too slow test
    }

    string testcontent = File.ReadAllText(path);
    var original = testcontent;
    if (original.Contains("namespace\\"))
    {
        return; // TODO - current namespace cannot be decided from AST
    }

    var sourceUnit = new TestSourceUnit(original, path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Php71Set);
    var factory = new BasicNodesFactory(sourceUnit);
    var errors = new TestErrorSink();

    sourceUnit.Parse(factory, errors, new TestErrorRecovery());
    GlobalCode ast = sourceUnit.Ast;
    if (errors.Count != 0)
    {
        return; // AST is null or invalid
    }

    // Re-emit source code from the AST using an empty token provider.
    var provider = SourceTokenProviderFactory.CreateEmptyProvider();
    var composer = new EmptyComposer(provider);
    var visitor = new TokenVisitor(new TreeContext(ast), composer, provider);
    visitor.VisitElement(ast);
    var code = composer.Code.ToString();

    // The regenerated code must match the original source after normalization.
    var expectedStr = PrepareString(original);
    var actualStr = PrepareString(code);
    Assert.AreEqual(expectedStr, actualStr);

    // The regenerated token stream must match the original one.
    var expected = FilterTokens(sourceUnit.SourceLexer.AllTokens);
    var actual = FilterTokens(composer.Processed);
    Assert.AreEqual(expected.Length, actual.Length);

    for (int i = 0; i < Math.Min(expected.Length, actual.Length); i++)
    {
        if (expected[i].Token == Tokens.T_SEMI && actual[i].Token == Tokens.T_CASE)
        {
            // no-op; kept as a breakpoint anchor for diagnosing this mismatch
        }

        if (expected[i].Token == Tokens.T_LOGICAL_OR && actual[i].Token == Tokens.T_BOOLEAN_OR ||
            expected[i].Token == Tokens.T_LOGICAL_AND && actual[i].Token == Tokens.T_BOOLEAN_AND)
        {
            // 'and'/'or' may be re-emitted as '&&'/'||'; treat them as equivalent
        }
        else
        {
            Assert.AreEqual(expected[i].Token, actual[i].Token);
        }
    }

    // Parse the regenerated code and compare the serialized ASTs.
    sourceUnit = new TestSourceUnit(code, path, Encoding.UTF8, Lexer.LexicalStates.INITIAL, LanguageFeatures.Php71Set);
    sourceUnit.Parse(factory, errors, new TestErrorRecovery());
    var newAst = sourceUnit.Ast;

    var serializer = new JsonNodeWriter();
    var serializerVisitor = new TreeSerializer(serializer);
    ast.VisitMe(serializerVisitor);
    expectedStr = serializer.ToString();

    serializer = new JsonNodeWriter();
    serializerVisitor = new TreeSerializer(serializer);
    newAst.VisitMe(serializerVisitor);
    actualStr = serializer.ToString();

    Assert.AreEqual(expectedStr, actualStr);
}