public void ConvertJson2Bson_HelloWorld_RoundTripsJsonToBsonAndBack()
{
    // input from example at http://bsonspec.org/#/specification
    var inputText = @"{ ""hello"" : ""world"" }";

    // BSON layout: int32 document length (0x16 = 22), element type 0x02 (UTF-8 string),
    // cstring key "hello", int32 value length including terminator (0x06), "world",
    // then the trailing 0x00 that closes the document
    var expectedBinary = Encoding.UTF8.GetBytes(
        "\x16\x00\x00\x00\x02hello\x00" +
        "\x06\x00\x00\x00world\x00\x00");

    var expectedText = @"{""hello"":""world""}";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputText);

    var bsonFormatter = new BsonWriter.BsonFormatter();
    var actualBinary = bsonFormatter.Format(tokens1);

    Assert.Equal(expectedBinary, actualBinary);

    var bsonTokenizer = new BsonReader.BsonTokenizer();
    var tokens2 = bsonTokenizer.GetTokens(actualBinary);

    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualText = jsonFormatter.Format(tokens2);

    Assert.Equal(expectedText, actualText);
}
public void ConvertJson2Xml_HelloWorld_RoundTripsJsonToXmlAndBack()
{
    // input from example at http://bsonspec.org/#/specification
    var inputJson = @"{ ""hello"" : ""world"" }";
    var expectedXml = @"<object><hello>world</hello></object>";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputJson);

    var writerSettings = new DataWriterSettings { PrettyPrint = false };
    var xmlFormatter = new TransformFormatter<ModelTokenType, MarkupTokenType>(
        new XmlWriter.XmlFormatter(writerSettings),
        new XmlWriter.XmlOutTransformer(writerSettings));
    var actualXml = xmlFormatter.Format(tokens1);

    Assert.Equal(expectedXml, actualXml);

    var expectedJson = @"{""hello"":""world""}";

    var readerSettings = new DataReaderSettings(writerSettings.Resolver);
    var xmlTokenizer = new TransformTokenizer<MarkupTokenType, ModelTokenType>(
        new XmlReader.XmlTokenizer(),
        new XmlReader.XmlInTransformer(readerSettings));
    var tokens2 = xmlTokenizer.GetTokens(actualXml);

    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualJson = jsonFormatter.Format(tokens2);

    Assert.Equal(expectedJson, actualJson);
}
public void ConvertJson2Bson_BooleanValue_RoundTripsJsonToBsonAndBack()
{
    // input from example at http://codebetter.com/blogs/karlseguin/archive/2010/03/05/bson-serialization.aspx
    var inputText = @"{valid:true}";

    // 13-byte document: int32 length (13), element type 0x08 (boolean),
    // cstring key "valid", value 0x01 (true), then the terminating 0x00
    var expectedBinary = new byte[] { 13, 0, 0, 0, 8, 118, 97, 108, 105, 100, 0, 1, 0 };

    var expectedText = @"{ ""valid"" : true }";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputText);

    var bsonFormatter = new BsonWriter.BsonFormatter();
    var actualBinary = bsonFormatter.Format(tokens1);

    Assert.Equal(expectedBinary, actualBinary);

    var bsonTokenizer = new BsonReader.BsonTokenizer();
    var tokens2 = bsonTokenizer.GetTokens(actualBinary);

    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = true });
    var actualText = jsonFormatter.Format(tokens2);

    Assert.Equal(expectedText, actualText);
}
public void GetTokens_ArrayEmpty_ReturnsEmptyArrayTokens()
{
    const string input = "[]";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void ConvertJson2Bson_ArrayAsProperty_RoundTripsJsonToBsonAndBack()
{
    // input from example at http://bsonspec.org/#/specification
    var inputText = @"{ ""BSON"" : [ ""awesome"", 5.05, 1986 ] }";

    var expectedBinary = new byte[]
    {
        // int32 document length (0x31 = 49)
        0x31, 0x00, 0x00, 0x00,
        // element type 0x04 (array), cstring key "BSON"
        0x04, (byte)'B', (byte)'S', (byte)'O', (byte)'N', 0x00,
        // int32 length of the embedded array document (0x26 = 38)
        0x26, 0x00, 0x00, 0x00,
        // element "0": type 0x02 (string), length 8 including terminator, "awesome"
        0x02, (byte)'0', 0, 0x08, 0x00, 0x00, 0x00, (byte)'a', (byte)'w', (byte)'e', (byte)'s', (byte)'o', (byte)'m', (byte)'e', 0x00,
        // element "1": type 0x01 (double), little-endian 5.05
        0x01, (byte)'1', 0, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x14, 0x40,
        // element "2": type 0x10 (int32), 1986 = 0x07C2
        0x10, (byte)'2', 0, 0xC2, 0x07, 0x00, 0x00,
        // terminators for the embedded and outer documents
        0x00, 0x00
    };

    var expectedText = @"{""BSON"":[""awesome"",5.05,1986]}";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputText);

    var bsonFormatter = new BsonWriter.BsonFormatter();
    var actualBinary = bsonFormatter.Format(tokens1);

    Assert.Equal(expectedBinary, actualBinary);

    var bsonTokenizer = new BsonReader.BsonTokenizer();
    var tokens2 = bsonTokenizer.GetTokens(actualBinary);

    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualText = jsonFormatter.Format(tokens2);

    Assert.Equal(expectedText, actualText);
}
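// The JSON-to-BSON round-trip steps above repeat across several tests. A minimal helper
// sketch that factors out the tokenize/format/assert sequence is shown below; the name
// AssertJsonBsonRoundTrip is hypothetical and not part of the existing suite, and it only
// reuses the APIs already exercised by the tests above.
private static void AssertJsonBsonRoundTrip(string inputJson, byte[] expectedBinary, string expectedJson)
{
    // JSON text -> model tokens -> BSON bytes
    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var bsonFormatter = new BsonWriter.BsonFormatter();
    var actualBinary = bsonFormatter.Format(jsonTokenizer.GetTokens(inputJson));
    Assert.Equal(expectedBinary, actualBinary);

    // BSON bytes -> model tokens -> compact JSON text
    var bsonTokenizer = new BsonReader.BsonTokenizer();
    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualJson = jsonFormatter.Format(bsonTokenizer.GetTokens(actualBinary));
    Assert.Equal(expectedJson, actualJson);
}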
public void ConvertJson2Xml_ArrayAsProperty_RoundTripsJsonToXmlAndBack()
{
    // input from example at http://bsonspec.org/#/specification
    var inputJson = @"{ ""BSON"" : [ ""awesome"", 5.05, 1986 ] }";
    var expectedXml = @"<object><BSON><item>awesome</item><item>5.05</item><item>1986</item></BSON></object>";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputJson);

    var writerSettings = new DataWriterSettings { PrettyPrint = false };
    var xmlFormatter = new TransformFormatter<ModelTokenType, MarkupTokenType>(
        new XmlWriter.XmlFormatter(writerSettings),
        new XmlWriter.XmlOutTransformer(writerSettings));
    var actualXml = xmlFormatter.Format(tokens1);

    Assert.Equal(expectedXml, actualXml);

    var expectedJson = @"{""BSON"":[""awesome"",5.05,1986]}";

    var readerSettings = new DataReaderSettings(writerSettings.Resolver);
    var xmlTokenizer = new TransformTokenizer<MarkupTokenType, ModelTokenType>(
        new XmlReader.XmlTokenizer(),
        new XmlReader.XmlInTransformer(readerSettings));
    var tokens2 = xmlTokenizer.GetTokens(actualXml);

    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualJson = jsonFormatter.Format(tokens2);

    Assert.Equal(expectedJson, actualJson);
}
public void ConvertJson2Xml_BooleanValue_RoundTripsJsonToXmlAndBack()
{
    // input from example at http://codebetter.com/blogs/karlseguin/archive/2010/03/05/bson-serialization.aspx
    var inputJson = @"{valid:true}";
    var expectedXml = @"<object> <valid>true</valid> </object>";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputJson);

    var writerSettings = new DataWriterSettings { PrettyPrint = true };
    var xmlFormatter = new TransformFormatter<ModelTokenType, MarkupTokenType>(
        new XmlWriter.XmlFormatter(writerSettings),
        new XmlWriter.XmlOutTransformer(writerSettings));
    var actualXml = xmlFormatter.Format(tokens1);

    Assert.Equal(expectedXml, actualXml);

    var expectedJson = @"{ ""valid"" : ""true"" }";

    var readerSettings = new DataReaderSettings(writerSettings.Resolver);
    var xmlTokenizer = new TransformTokenizer<MarkupTokenType, ModelTokenType>(
        new XmlReader.XmlTokenizer(),
        new XmlReader.XmlInTransformer(readerSettings));
    var tokens2 = xmlTokenizer.GetTokens(actualXml);

    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = true });
    var actualJson = jsonFormatter.Format(tokens2);

    Assert.Equal(expectedJson, actualJson);
}
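// The JSON-to-XML round trips share the same fixed pipeline of tokenizers, transformers,
// and formatters. A minimal helper sketch follows; AssertJsonXmlRoundTrip is hypothetical
// and not part of the existing suite, and it only reuses the APIs already used above.
private static void AssertJsonXmlRoundTrip(string inputJson, string expectedXml, string expectedJson, bool prettyPrint)
{
    var writerSettings = new DataWriterSettings { PrettyPrint = prettyPrint };

    // JSON text -> model tokens -> markup tokens -> XML text
    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var xmlFormatter = new TransformFormatter<ModelTokenType, MarkupTokenType>(
        new XmlWriter.XmlFormatter(writerSettings),
        new XmlWriter.XmlOutTransformer(writerSettings));
    var actualXml = xmlFormatter.Format(jsonTokenizer.GetTokens(inputJson));
    Assert.Equal(expectedXml, actualXml);

    // XML text -> markup tokens -> model tokens -> JSON text
    var readerSettings = new DataReaderSettings(writerSettings.Resolver);
    var xmlTokenizer = new TransformTokenizer<MarkupTokenType, ModelTokenType>(
        new XmlReader.XmlTokenizer(),
        new XmlReader.XmlInTransformer(readerSettings));
    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = prettyPrint });
    var actualJson = jsonFormatter.Format(xmlTokenizer.GetTokens(actualXml));
    Assert.Equal(expectedJson, actualJson);
}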
public void GetTokens_NumberFloatMissingFractional_ThrowsDeserializationException()
{
    const string input = @"123.e5";

    var tokenizer = new JsonReader.JsonTokenizer();

    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    // verify exception is coming from expected index
    Assert.Equal(0, ex.Index);
}
public void GetTokens_NumberHexValue_ThrowsDeserializationException()
{
    // input from fail14.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"{""Numbers cannot be hex"": 0x14}";

    var tokenizer = new JsonReader.JsonTokenizer();

    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    // verify exception is coming from expected index
    Assert.Equal(26, ex.Index);
}
public void GetTokens_NumberFloatExtraExpSign_ThrowsDeserializationException()
{
    // input from fail31.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"[0e+-1]";

    var tokenizer = new JsonReader.JsonTokenizer();

    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    // verify exception is coming from expected index
    Assert.Equal(1, ex.Index);
}
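// The malformed-input tests in this suite all assert both the exception type and the
// character index it reports. A minimal helper sketch; AssertThrowsAtIndex is hypothetical
// and not part of the existing suite.
private static void AssertThrowsAtIndex(string input, int expectedIndex)
{
    var tokenizer = new JsonReader.JsonTokenizer();

    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    // verify the exception reports the expected character index
    Assert.Equal(expectedIndex, ex.Index);
}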
public void GetTokens_NumberNegNoLeadingDigitFloat_ReturnsNumberToken()
{
    const string input = "-.123456";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive(-.123456)
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_NumberIntegerLeadingZero_ReturnsObjectTokensWithNumberValue()
{
    // input from fail13.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"{""Numbers cannot have leading zeroes"": 013}";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Numbers cannot have leading zeroes"),
        ModelGrammar.TokenPrimitive(13),
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
public void GetTokens_HighControl_Accepted()
{
    const string input = "\"\u0082\"";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("\u0082")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_StringEmpty_ReturnsEmptyStringToken()
{
    const string input = "\"\"";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive(String.Empty)
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_LiteralNonQuotedKeyDollarSign_ReturnsObjectTokensWithLiteralKey()
{
    const string input = @"{ $abcdefg0123456 : false }";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("$abcdefg0123456"),
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_KeywordUndefined_ReturnsUndefinedToken()
{
    const string input = @"undefined";
    var expected = new[]
    {
        ModelGrammar.TokenNull
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
public void GetTokens_StringUnescapedSingleQuote_ReturnsStringToken()
{
    const string input = @"""unescaped ' single quote""";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("unescaped ' single quote")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_StringUnescapedDoubleQuote_ReturnsStringToken()
{
    const string input = @"'unescaped "" quote'";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("unescaped \" quote")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
public void GetTokens_StringSingleQuote_ReturnsStringToken()
{
    // input from fail24.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"['single quote']";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("single quote"),
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
public void GetTokens_StringUnrecognizedEscapeNull_CharIgnored()
{
    // input from fail17.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"[""Illegal backslash escape: \017""]";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("Illegal backslash escape: 17"),
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
public void GetTokens_StringImproperlyEscapedChars_ReturnsStringTokenWithSimpleChars()
{
    const string input = @"""\u\u1\u12\u123\u12345""";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("uu1u12u123\u12345")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_ArrayMultiItemWhitespace_ReturnsSimpleArrayTokens()
{
    const string input = "[ 0, null, false,true ]";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_StringSimple_ReturnsStringToken()
{
    // input from fail1.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"""A JSON payload should be an object or array, not a string.""";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("A JSON payload should be an object or array, not a string.")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
public void GetTokens_LiteralNonQuotedKey_ReturnsObjectTokensWithLiteralKey()
{
    // input from fail3.json in test suite at http://www.json.org/JSON_checker/
    const string input = @"{unquoted_key: ""keys must be quoted""}";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("unquoted_key"),
        ModelGrammar.TokenPrimitive("keys must be quoted"),
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_StringEscapedChars_ReturnsStringToken()
{
    const string input = @"""\\\b\f\n\r\t\u0123\u4567\u89AB\uCDEF\uabcd\uef4A\""""";
    var expected = new[]
    {
        ModelGrammar.TokenPrimitive("\\\b\f\n\r\t\u0123\u4567\u89AB\uCDEF\uabcd\uef4A\"")
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_LiteralNonQuotedKeyNumber_ReturnsObjectTokensWithLiteralKey()
{
    const string input = @"{ _123456 : true }";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("_123456"),
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_ObjectCommaInsteadOfColon_ProducesInvalidSequence()
{
    // input from fail21.json in test suite at http://www.json.org/JSON_checker/
    var input = @"{""Comma instead of colon"", null}";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenPrimitive("Comma instead of colon"),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_KeywordNull_ReturnsNullToken()
{
    const string input = @"null";
    var expected = new[]
    {
        ModelGrammar.TokenNull
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_StringUnterminated_ThrowsDeserializationException()
{
    const string input = @"""unterminated";

    var tokenizer = new JsonReader.JsonTokenizer();

    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    // verify exception is coming from expected index
    Assert.Equal(0, ex.Index);
}
public void ConvertJson2Xml_ComplexGraph_RoundTripsJsonToXmlAndBack()
{
    // input from pass1.json in test suite at http://www.json.org/JSON_checker/
    const string inputJson = @"[ ""JSON Test Pattern pass1"", {""object with 1 member"":[""array with 1 element""]}, {}, [], -42, true, false, null, { ""integer"": 1234567890, ""real"": -9876.543210, ""e"": 0.123456789e-12, ""E"": 1.234567890E+34, """": 23456789012E66, ""zero"": 0, ""one"": 1, ""space"": "" "", ""quote"": ""\"""", ""backslash"": ""\\"", ""controls"": ""\b\f\n\r\t"", ""slash"": ""/ & \/"", ""alpha"": ""abcdefghijklmnopqrstuvwyz"", ""ALPHA"": ""ABCDEFGHIJKLMNOPQRSTUVWYZ"", ""digit"": ""0123456789"", ""0123456789"": ""digit"", ""special"": ""`1~!@#$%^&*()_+-={':[,]}|;.</>?"", ""hex"": ""\u0123\u4567\u89AB\uCDEF\uabcd\uef4A"", ""true"": true, ""false"": false, ""null"": null, ""array"":[ ], ""object"":{ }, ""address"": ""50 St. James Street"", ""url"": ""http://www.JSON.org/"", ""comment"": ""// /* <!-- --"", ""# -- --> */"": "" "", "" s p a c e d "" :[1,2 , 3 , 4 , 5 , 6 ,7 ],""compact"":[1,2,3,4,5,6,7], ""jsontext"": ""{\""object with 1 member\"":[\""array with 1 element\""]}"", ""quotes"": ""&#34; \u0022 %22 0x22 034 &#x22;"", ""\/\\\""\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"" : ""A key can be any string"" }, 0.5 ,98.6 , 99.44 , 1066, 1e1, 0.1e1, 1e-1, 1e00,2e+00,2e-00 ,""rosebud""]";

    var expectedXml =
        @"<array> <item>JSON Test Pattern pass1</item> <item> <object_x0020_with_x0020_1_x0020_member> <item>array with 1 element</item> </object_x0020_with_x0020_1_x0020_member> </item> <item /> <item /> <item>-42</item> <item>true</item> <item>false</item> <item /> <item> <integer>1234567890</integer> <real>-9876.54321</real> <e>1.23456789e-13</e> <E>1.23456789e+34</E> <double>2.3456789012e+76</double> <zero>0</zero> <one>1</one> <space> </space> <quote>""</quote> <backslash>\</backslash> <controls>" +
        "\n\r\t" +
        @"</controls> <slash>/ & /</slash> <alpha>abcdefghijklmnopqrstuvwyz</alpha> <ALPHA>ABCDEFGHIJKLMNOPQRSTUVWYZ</ALPHA> <digit>0123456789</digit> <_x0030_123456789>digit</_x0030_123456789> <special>`1~!@#$%^&*()_+-={':[,]}|;.</>?</special> <hex>" +
        "\u0123\u4567\u89AB\uCDEF\uABCD\uEF4A" +
        @"</hex> <true>true</true> <false>false</false> <null /> <array /> <object /> <address>50 St. James Street</address> <url>http://www.JSON.org/</url> <comment>// /* <!-- --</comment> <_x0023__x0020_--_x0020_--_x003E__x0020__x002A__x002F_> </_x0023__x0020_--_x0020_--_x003E__x0020__x002A__x002F_> <_x0020_s_x0020_p_x0020_a_x0020_c_x0020_e_x0020_d_x0020_> <item>1</item> <item>2</item> <item>3</item> <item>4</item> <item>5</item> <item>6</item> <item>7</item> </_x0020_s_x0020_p_x0020_a_x0020_c_x0020_e_x0020_d_x0020_> <compact> <item>1</item> <item>2</item> <item>3</item> <item>4</item> <item>5</item> <item>6</item> <item>7</item> </compact> <jsontext>{""object with 1 member"":[""array with 1 element""]}</jsontext> <quotes>&#34; "" %22 0x22 034 &#x22;</quotes> <" +
        "_x002F__x005C__x0022_\uCAFE\uBABE_xAB98__xFCDE_\uBCDA_xEF4A__x0008__x000C__x000A__x000D__x0009__x0060_1_x007E__x0021__x0040__x0023__x0024__x0025__x005E__x0026__x002A__x0028__x0029___x002B_-_x003D__x005B__x005D__x007B__x007D__x007C__x003B__x003A__x0027__x002C_._x002F__x003C__x003E__x003F_" +
        @">A key can be any string</" +
        "_x002F__x005C__x0022_\uCAFE\uBABE_xAB98__xFCDE_\uBCDA_xEF4A__x0008__x000C__x000A__x000D__x0009__x0060_1_x007E__x0021__x0040__x0023__x0024__x0025__x005E__x0026__x002A__x0028__x0029___x002B_-_x003D__x005B__x005D__x007B__x007D__x007C__x003B__x003A__x0027__x002C_._x002F__x003C__x003E__x003F_" +
        @"> </item> <item>0.5</item> <item>98.6</item> <item>99.44</item> <item>1066</item> <item>10</item> <item>1</item> <item>0.1</item> <item>1</item> <item>2</item> <item>2</item> <item>rosebud</item> </array>";

    var jsonTokenizer = new JsonReader.JsonTokenizer();
    var tokens1 = jsonTokenizer.GetTokens(inputJson);

    var writerSettings = new DataWriterSettings { PrettyPrint = true };
    var xmlFormatter = new TransformFormatter<ModelTokenType, MarkupTokenType>(
        new XmlWriter.XmlFormatter(writerSettings),
        new XmlWriter.XmlOutTransformer(writerSettings));
    var actualXml = xmlFormatter.Format(tokens1);

    Assert.Equal(expectedXml, actualXml);

    const string expectedJson = @"[""JSON Test Pattern pass1"",{""object with 1 member"":[""array with 1 element""]},{},[],-42,true,false,null,{""integer"":1234567890,""real"":-9876.54321,""e"":1.23456789e-13,""E"":1.23456789e+34,"""":2.3456789012e+76,""zero"":0,""one"":1,""space"":"" "",""quote"":""\"""",""backslash"":""\\"",""controls"":""\b\f\n\r\t"",""slash"":""/ & /"",""alpha"":""abcdefghijklmnopqrstuvwyz"",""ALPHA"":""ABCDEFGHIJKLMNOPQRSTUVWYZ"",""digit"":""0123456789"",""0123456789"":""digit"",""special"":""`1~!@#$%^&*()_+-={':[,]}|;.\u003C/>?"",""hex"":""\u0123\u4567\u89AB\uCDEF\uABCD\uEF4A"",""true"":true,""false"":false,""null"":null,""array"":[],""object"":{},""address"":""50 St. James Street"",""url"":""http://www.JSON.org/"",""comment"":""// /* \u003C!-- --"",""# -- --> */"":"" "","" s p a c e d "":[1,2,3,4,5,6,7],""compact"":[1,2,3,4,5,6,7],""jsontext"":""{\""object with 1 member\"":[\""array with 1 element\""]}"",""quotes"":""&#34; \"" %22 0x22 034 &#x22;"",""/\\\""\uCAFE\uBABE\uAB98\uFCDE\uBCDA\uEF4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./\u003C>?"":""A key can be any string""},0.5,98.6,99.44,1066,10,1,0.1,1,2,2,""rosebud""]";

    var readerSettings = new DataReaderSettings(writerSettings.Resolver);
    var xmlTokenizer = new TransformTokenizer<MarkupTokenType, ModelTokenType>(
        new XmlReader.XmlTokenizer(),
        new XmlReader.XmlInTransformer(readerSettings));
    var tokens2 = xmlTokenizer.GetTokens(actualXml);

    var jsonFormatter = new JsonWriter.JsonFormatter(new DataWriterSettings { PrettyPrint = false });
    var actualJson = jsonFormatter.Format(tokens2);

    Assert.Equal(expectedJson, actualJson);
}
public void GetTokens_ObjectUnterminated_ProducesInvalidSequence()
{
    // NOTE: the analyzer must flag this as an error, since it is a grammar error, not a tokenization error
    // input from fail32.json in test suite at http://www.json.org/JSON_checker/
    var input = @"{""Comma instead of closing brace"": true,";
    var expected = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Comma instead of closing brace"),
        ModelGrammar.TokenTrue
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    Assert.Equal(expected, actual);
}
public void GetTokens_StringEscapedTabChar_ReturnsStringToken()
{
    // input from fail26.json in test suite at http://www.json.org/JSON_checker/
    // the escaped characters in the verbatim string below are literal tab characters
    const string input = @"[""\	tab\	character\	in\	string\	""]";
    var expected = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("\ttab\tcharacter\tin\tstring\t"),
        ModelGrammar.TokenArrayEnd
    };

    var tokenizer = new JsonReader.JsonTokenizer();
    var actual = tokenizer.GetTokens(input).ToArray();

    // this is not allowed according to strict JSON, but we're following Postel's Law
    Assert.Equal(expected, actual);
}
public void GetTokens_StringEscapedLineBreak_ThrowsDeserializationException()
{
    // input from fail28.json in test suite at http://www.json.org/JSON_checker/
    // the escape in the verbatim string below is a backslash followed by a literal line break
    const string input = @"[""line\
break""]";

    var tokenizer = new JsonReader.JsonTokenizer();

    DeserializationException ex = Assert.Throws<DeserializationException>(
        delegate
        {
            var actual = tokenizer.GetTokens(input).ToArray();
        });

    // verify exception is coming from expected index
    Assert.Equal(1, ex.Index);
}