[Fact]
public void GetTokens_HtmlContent_ReturnsSequence()
{
    const string input = @"<div class=""content""><p style=""color:red""><strong>Lorem ipsum</strong> dolor sit amet, <i>consectetur</i> adipiscing elit.</p></div>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("div")),
        MarkupGrammar.TokenAttribute(new DataName("class")),
        MarkupGrammar.TokenPrimitive("content"),
        MarkupGrammar.TokenElementBegin(new DataName("p")),
        MarkupGrammar.TokenAttribute(new DataName("style")),
        MarkupGrammar.TokenPrimitive("color:red"),
        MarkupGrammar.TokenElementBegin(new DataName("strong")),
        MarkupGrammar.TokenPrimitive("Lorem ipsum"),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenPrimitive(" dolor sit amet, "),
        MarkupGrammar.TokenElementBegin(new DataName("i")),
        MarkupGrammar.TokenPrimitive("consectetur"),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenPrimitive(" adipiscing elit."),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_MixedEntities_ReturnsSequence()
{
    // entity-encoded input; the tokenizer should decode &lt; &gt; &amp; into literal characters
    const string input = @"there should &lt;b&gt;e decoded chars &amp; inside this text";

    var expected = new[]
    {
        MarkupGrammar.TokenPrimitive(@"there should <b>e decoded chars & inside this text")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_EntityWithTrailingText_ReturnsSequence()
{
    const string input = @"&trailing";

    var expected = new[]
    {
        MarkupGrammar.TokenPrimitive("&trailing")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_HtmlEntityEuro_ReturnsSequence()
{
    // HTML named entity for the euro sign
    const string input = @"&euro;";

    var expected = new[]
    {
        MarkupGrammar.TokenPrimitive("€")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_XmlEntityHexLowerCase_ReturnsSequence()
{
    // lowercase hexadecimal character reference for U+ABCD
    const string input = @"&#xabcd;";

    var expected = new[]
    {
        MarkupGrammar.TokenPrimitive("\uabcd")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_XmlDeclaration_ReturnsUnparsed()
{
    const string input = @"<?xml version=""1.0""?>";

    var expected = new[]
    {
        MarkupGrammar.TokenUnparsed("?", "?", @"xml version=""1.0""")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_XmlComment_ReturnsUnparsed()
{
    const string input = @"<!-- a quick note -->";

    var expected = new[]
    {
        MarkupGrammar.TokenUnparsed("!--", "--", @" a quick note ")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_XmlCData_ReturnsTextValue()
{
    const string input = @"<![CDATA[value>""0"" && value<""10"" ?""valid"":""error""]]>";

    var expected = new[]
    {
        MarkupGrammar.TokenPrimitive(@"value>""0"" && value<""10"" ?""valid"":""error""")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_DifferentPrefixSameNamespace_ReturnsSequence()
{
    const string input = @"<foo xmlns=""http://example.org"" xmlns:blah=""http://example.org"" blah:key=""value"" />";

    var expected = new[]
    {
        MarkupGrammar.TokenElementVoid(new DataName("foo", String.Empty, "http://example.org")),
        MarkupGrammar.TokenAttribute(new DataName("key", "blah", "http://example.org")),
        MarkupGrammar.TokenPrimitive("value")
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_SingleAttributeWhitespace_ReturnsSequence()
{
    const string input = @"<root whitespace="" this contains whitespace ""></root>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("root")),
        MarkupGrammar.TokenAttribute(new DataName("whitespace")),
        MarkupGrammar.TokenPrimitive(" this contains whitespace "),
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_AttributeWhitespaceQuotDelims_ReturnsSequence()
{
    const string input = @"<root white = "" extra whitespace around quote delims "" ></root>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("root")),
        MarkupGrammar.TokenAttribute(new DataName("white")),
        MarkupGrammar.TokenPrimitive(" extra whitespace around quote delims "),
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_SingleAttributeEmptyValue_ReturnsSequence()
{
    const string input = @"<root emptyValue=""""></root>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("root")),
        MarkupGrammar.TokenAttribute(new DataName("emptyValue")),
        MarkupGrammar.TokenPrimitive(String.Empty),
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_ParentAndChildSharePrefixedNamespace_ReturnsSequence()
{
    const string input = @"<bar:foo xmlns:bar=""http://example.org""><bar:child>value</bar:child></bar:foo>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("foo", "bar", "http://example.org")),
        MarkupGrammar.TokenElementBegin(new DataName("child", "bar", "http://example.org")),
        MarkupGrammar.TokenPrimitive("value"),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_ParentAndChildDifferentDefaultNamespaces_ReturnsSequence()
{
    const string input = @"<foo xmlns=""http://json.org""><child xmlns=""http://jsonfx.net"">text value</child></foo>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("foo", String.Empty, "http://json.org")),
        MarkupGrammar.TokenElementBegin(new DataName("child", String.Empty, "http://jsonfx.net")),
        MarkupGrammar.TokenPrimitive("text value"),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_NamespacedChildTag_ReturnsSequence()
{
    const string input = @"<foo><child xmlns=""http://example.com/schema"">value</child></foo>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("foo")),
        MarkupGrammar.TokenElementBegin(new DataName("child", String.Empty, "http://example.com/schema")),
        MarkupGrammar.TokenPrimitive("value"),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_MultipleAttributes_ReturnsSequence()
{
    const string input =
        @"<root no-value="""" whitespace="" this contains whitespace "" anyQuotedText=""" +
        "/\\\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +
        @"""></root>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("root")),
        MarkupGrammar.TokenAttribute(new DataName("anyQuotedText")),
        MarkupGrammar.TokenPrimitive("/\\\uCAFE\uBABE\uAB98\uFCDE\uBCDA\uEF4A `1~!@#$%^&*()_+-=[]{}|;:',./<>?"),
        MarkupGrammar.TokenAttribute(new DataName("no-value")),
        MarkupGrammar.TokenPrimitive(String.Empty),
        MarkupGrammar.TokenAttribute(new DataName("whitespace")),
        MarkupGrammar.TokenPrimitive(" this contains whitespace "),
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_NestedDefaultNamespaces_ReturnsSequence()
{
    const string input = @"<outer xmlns=""http://example.org/outer""><middle-1 xmlns=""http://example.org/inner""><inner>this should be inner</inner></middle-1><middle-2>this should be outer</middle-2></outer>";

    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("outer", String.Empty, "http://example.org/outer")),
        MarkupGrammar.TokenElementBegin(new DataName("middle-1", String.Empty, "http://example.org/inner")),
        MarkupGrammar.TokenElementBegin(new DataName("inner", String.Empty, "http://example.org/inner")),
        MarkupGrammar.TokenPrimitive("this should be inner"),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementBegin(new DataName("middle-2", String.Empty, "http://example.org/outer")),
        MarkupGrammar.TokenPrimitive("this should be outer"),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
[Fact]
public void GetTokens_UndeclaredPrefixes_ThrowsDeserializationException()
{
    const string input = @"<a:one><b:two><c:three></d:three></e:two></f:one>";

    // note: never compared below; the tokenizer is expected to throw
    // on the undeclared prefixes before a full token sequence is produced
    var expected = new[]
    {
        MarkupGrammar.TokenElementBegin(new DataName("one")),
        MarkupGrammar.TokenElementBegin(new DataName("two")),
        MarkupGrammar.TokenElementBegin(new DataName("three")),
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd,
        MarkupGrammar.TokenElementEnd
    };

    var tokenizer = new XmlReader.XmlTokenizer();

    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate()
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    Assert.Equal(2, ex.Index);
}