Example #1
        public void NextAfterEndDocumentThrows()
        {
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null"));

            Assert.Equal(JsonToken.Null, tokenizer.Next());
            Assert.Equal(JsonToken.EndDocument, tokenizer.Next());
            Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
        }
Example #2
        public void SkipValue(string json)
        {
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json.Replace('\'', '"')));

            Assert.Equal(JsonToken.StartObject, tokenizer.Next());
            Assert.Equal("skip", tokenizer.Next().StringValue);
            tokenizer.SkipValue();
            Assert.Equal("next", tokenizer.Next().StringValue);
        }
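The listing above omits the attributes and data rows that drive this parameterized test. Assuming an xUnit test class (the Assert.Equal/Assert.Throws calls point that way), the wiring would look roughly like the sketch below; the inline JSON documents are hypothetical, chosen only to show inputs where a string, an array, or a nested object has to be skipped, and to illustrate the single-quote convention that Replace('\'', '"') turns into real JSON quotes.

        // A minimal sketch, assuming xUnit [Theory]/[InlineData]; the JSON
        // documents are hypothetical, not taken from the original test data.
        [Theory]
        [InlineData("{'skip': 'value', 'next': 'after'}")]          // skip a string value
        [InlineData("{'skip': [1, 2, 3], 'next': 'after'}")]        // skip an entire array
        [InlineData("{'skip': {'a': {'b': 2}}, 'next': 'after'}")]  // skip a nested object
        public void SkipValue(string json)
        {
            // body as in Example #2 above
        }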
Example #3
        public void PeekThenNext()
        {
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader("[1, 2]"));

            Assert.Equal(JsonToken.StartArray, tokenizer.Next());
            // Peek at the 1
            Assert.Equal(JsonToken.Value(1), tokenizer.Peek());
            // Now consume it
            Assert.Equal(JsonToken.Value(1), tokenizer.Next());
            Assert.Equal(JsonToken.Value(2), tokenizer.Next());
            Assert.Equal(JsonToken.EndArray, tokenizer.Next());
            Assert.Equal(JsonToken.EndDocument, tokenizer.Next());
        }
Example #4
        public void InvalidStructure(string json, int expectedValidTokens)
        {
            // Note: we don't test that the earlier tokens are exactly as expected,
            // partly because that's hard to parameterize.
            var reader    = new StringReader(json.Replace('\'', '"'));
            var tokenizer = JsonTokenizer.FromTextReader(reader);

            for (int i = 0; i < expectedValidTokens; i++)
            {
                Assert.NotNull(tokenizer.Next());
            }
            Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
        }
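As with Example #2, the data attributes are not shown. A hedged sketch of how the cases might be supplied under xUnit follows; each hypothetical document pairs a structural error with the number of tokens one would expect to read successfully before Next() throws.

        // A minimal sketch, assuming xUnit; the cases are hypothetical and the
        // token counts reflect expected behaviour, not verified test data.
        [Theory]
        [InlineData("[1, 2", 3)]       // StartArray, 1, 2 -- document ends while the array is still open
        [InlineData("{'name': }", 2)]  // StartObject, name -- '}' appears where a value is required
        [InlineData("[1, , 2]", 2)]    // StartArray, 1 -- ',' appears where a value is required
        public void InvalidStructure(string json, int expectedValidTokens)
        {
            // body as in Example #4 above
        }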
Example #5
        private static void AssertThrowsAfter(string json, params JsonToken[] expectedTokens)
        {
            var reader    = new StringReader(json);
            var tokenizer = JsonTokenizer.FromTextReader(reader);

            for (int i = 0; i < expectedTokens.Length; i++)
            {
                var actualToken = tokenizer.Next();
                if (actualToken == JsonToken.EndDocument)
                {
                    Assert.False(true, $"Expected {expectedTokens[i]} but reached end of document");
                }
                Assert.Equal(expectedTokens[i], actualToken);
            }
            Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
        }
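For reference, a call to this helper might look like the hypothetical case below: a stray comma makes the document invalid after the array start and the first value have been read. Only tokens already used elsewhere in these examples are assumed.

        // Hypothetical usage: the stray comma should cause the tokenizer to throw
        // after StartArray and Value(1) have been returned.
        AssertThrowsAfter("[1, , 2]", JsonToken.StartArray, JsonToken.Value(1));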
Example #6
        /// <summary>
        /// Asserts that the specified JSON is tokenized into the given sequence of tokens.
        /// Unlike <see cref="AssertTokens(string, JsonToken[])"/>, this does not perform any character
        /// replacement on the specified JSON, and should be used when the text contains apostrophes which
        /// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of
        /// expected tokens, but is implicit.
        /// </summary>
        private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens)
        {
            var reader    = new StringReader(json);
            var tokenizer = JsonTokenizer.FromTextReader(reader);

            for (int i = 0; i < expectedTokens.Length; i++)
            {
                var actualToken = tokenizer.Next();
                if (actualToken == JsonToken.EndDocument)
                {
                    Assert.False(true, $"Expected {expectedTokens[i]} but reached end of token stream");
                }
                Assert.Equal(expectedTokens[i], actualToken);
            }
            var finalToken = tokenizer.Next();

            if (finalToken != JsonToken.EndDocument)
            {
                Assert.False(true, "Expected token stream to be exhausted; received ${finalToken}");
            }
        }
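A hedged usage sketch: the document below contains an apostrophe that must be kept as an apostrophe, which is exactly the situation the summary above describes. It assumes JsonToken.Value also has a string overload, mirroring the numeric overload used in Example #3; if the actual API differs, adjust accordingly.

        // Hypothetical call; assumes a JsonToken.Value(string) overload exists.
        // The apostrophe in the document must not be rewritten, so the
        // no-replacement helper is the right choice here.
        AssertTokensNoReplacement("\"it's\"", JsonToken.Value("it's"));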