public void ObjectDepth()
{
    string json = "{ \"foo\": { \"x\": 1, \"y\": [ 0 ] } }";
    var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));

    // Local helper: consume one token, verify it, then verify the resulting depth.
    void ExpectNext(JsonToken expectedToken, int expectedDepth)
    {
        Assert.AreEqual(expectedToken, tokenizer.Next());
        Assert.AreEqual(expectedDepth, tokenizer.ObjectDepth);
    }

    Assert.AreEqual(0, tokenizer.ObjectDepth);
    ExpectNext(JsonToken.StartObject, 1);
    ExpectNext(JsonToken.Name("foo"), 1);
    ExpectNext(JsonToken.StartObject, 2);
    ExpectNext(JsonToken.Name("x"), 2);
    ExpectNext(JsonToken.Value(1), 2);
    ExpectNext(JsonToken.Name("y"), 2);
    ExpectNext(JsonToken.StartArray, 2); // Depth is unchanged inside an array
    ExpectNext(JsonToken.Value(0), 2);
    ExpectNext(JsonToken.EndArray, 2);
    ExpectNext(JsonToken.EndObject, 1);
    ExpectNext(JsonToken.EndObject, 0);
    ExpectNext(JsonToken.EndDocument, 0);
}
public void NextAfterEndDocumentThrows()
{
    var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null"));

    // Consume the whole (single-value) document.
    Assert.AreEqual(JsonToken.Null, tokenizer.Next());
    Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());

    // Reading past the end of the document is an error.
    Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
}
public void SkipValue(string json)
{
    // Apostrophes in the test data stand in for double quotes.
    var reader = new StringReader(json.Replace('\'', '"'));
    var tokenizer = JsonTokenizer.FromTextReader(reader);

    Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
    Assert.AreEqual("skip", tokenizer.Next().StringValue);
    tokenizer.SkipValue();
    Assert.AreEqual("next", tokenizer.Next().StringValue);
}
/// <summary>
/// Parses JSON read from <paramref name="jsonReader"/> and merges the information into the given message.
/// </summary>
/// <param name="message">The message to merge the JSON information into.</param>
/// <param name="jsonReader">Reader providing the JSON to parse.</param>
internal void Merge(IMessage message, TextReader jsonReader)
{
    var tokenizer = JsonTokenizer.FromTextReader(jsonReader);
    Merge(message, tokenizer);
    // The token stream must be fully exhausted: any trailing content after
    // the top-level value makes the document invalid.
    if (tokenizer.Next() != JsonToken.EndDocument)
    {
        throw new InvalidProtocolBufferException("Expected end of JSON after object");
    }
}
public void InvalidStructure(string json, int expectedValidTokens)
{
    // Note: we don't test that the earlier tokens are exactly as expected,
    // partly because that's hard to parameterize.
    var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json.Replace('\'', '"')));

    // Consume the tokens that are expected to parse cleanly...
    for (int index = 0; index < expectedValidTokens; index++)
    {
        Assert.IsNotNull(tokenizer.Next());
    }

    // ...then the next read must fail.
    Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
}
// Asserts that the tokenizer produces exactly the given tokens from the raw JSON,
// and that the very next read throws InvalidJsonException.
private static void AssertThrowsAfter(string json, params JsonToken[] expectedTokens)
{
    var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));
    foreach (var expected in expectedTokens)
    {
        var actual = tokenizer.Next();
        if (actual == JsonToken.EndDocument)
        {
            Assert.Fail("Expected {0} but reached end of document", expected);
        }
        Assert.AreEqual(expected, actual);
    }
    Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
}
/// <summary>
/// Asserts that the specified JSON is tokenized into the given sequence of tokens.
/// Unlike <see cref="AssertTokens(string, JsonToken[])"/>, this does not perform any character
/// replacement on the specified JSON, and should be used when the text contains apostrophes which
/// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of
/// expected tokens, but is implicit.
/// </summary>
private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens)
{
    var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));
    foreach (var expected in expectedTokens)
    {
        var actual = tokenizer.Next();
        if (actual == JsonToken.EndDocument)
        {
            Assert.Fail("Expected {0} but reached end of token stream", expected);
        }
        Assert.AreEqual(expected, actual);
    }
    // After the expected tokens, the stream must be exhausted.
    var trailing = tokenizer.Next();
    if (trailing != JsonToken.EndDocument)
    {
        Assert.Fail("Expected token stream to be exhausted; received {0}", trailing);
    }
}
public void ObjectDepth_WithPushBack()
{
    var tokenizer = JsonTokenizer.FromTextReader(new StringReader("{}"));
    Assert.AreEqual(0, tokenizer.ObjectDepth);

    // Reading "start object" takes us to depth 1; pushing it back should
    // effectively restore the previous depth.
    var token = tokenizer.Next();
    Assert.AreEqual(1, tokenizer.ObjectDepth);
    tokenizer.PushBack(token);
    Assert.AreEqual(0, tokenizer.ObjectDepth);

    // Re-reading the same token returns us to depth 1.
    token = tokenizer.Next();
    Assert.AreEqual(1, tokenizer.ObjectDepth);

    // Now the same in reverse, with EndObject.
    token = tokenizer.Next();
    Assert.AreEqual(0, tokenizer.ObjectDepth);
    tokenizer.PushBack(token);
    Assert.AreEqual(1, tokenizer.ObjectDepth);
    tokenizer.Next();
    Assert.AreEqual(0, tokenizer.ObjectDepth);
}