Example #1
        public void ObjectDepth()
        {
            string json      = "{ \"foo\": { \"x\": 1, \"y\": [ 0 ] } }";
            var    tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));

            // If we had more tests like this, I'd introduce a helper method... but for one test, it's not worth it.
            Assert.AreEqual(0, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
            Assert.AreEqual(1, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.Name("foo"), tokenizer.Next());
            Assert.AreEqual(1, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
            Assert.AreEqual(2, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.Name("x"), tokenizer.Next());
            Assert.AreEqual(2, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.Value(1), tokenizer.Next());
            Assert.AreEqual(2, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.Name("y"), tokenizer.Next());
            Assert.AreEqual(2, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.StartArray, tokenizer.Next());
            Assert.AreEqual(2, tokenizer.ObjectDepth); // Depth hasn't changed in array
            Assert.AreEqual(JsonToken.Value(0), tokenizer.Next());
            Assert.AreEqual(2, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.EndArray, tokenizer.Next());
            Assert.AreEqual(2, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.EndObject, tokenizer.Next());
            Assert.AreEqual(1, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.EndObject, tokenizer.Next());
            Assert.AreEqual(0, tokenizer.ObjectDepth);
            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
            Assert.AreEqual(0, tokenizer.ObjectDepth);
        }
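
A minimal driver over a similar document shows the iteration idiom that the assertions above spell out token by token. This is a sketch, not part of the test suite, and it assumes access to the internal JsonTokenizer type (the tests get it via InternalsVisibleTo):

    using System;
    using System.IO;

    // Walk a JSON document, reporting each token with the current object depth.
    static void DumpTokens(string json)
    {
        var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));
        JsonToken token;
        while ((token = tokenizer.Next()) != JsonToken.EndDocument)
        {
            Console.WriteLine("{0} (depth {1})", token, tokenizer.ObjectDepth);
        }
    }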
Example #2
        private void MergeField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)
        {
            var token = tokenizer.Next();

            if (token.Type == JsonToken.TokenType.Null)
            {
                // Note: different from Java API, which just ignores it.
                // TODO: Bring it more in line? Discuss...
                field.Accessor.Clear(message);
                return;
            }
            tokenizer.PushBack(token);

            if (field.IsMap)
            {
                MergeMapField(message, field, tokenizer);
            }
            else if (field.IsRepeated)
            {
                MergeRepeatedField(message, field, tokenizer);
            }
            else
            {
                var value = ParseSingleValue(field, tokenizer);
                field.Accessor.SetValue(message, value);
            }
        }
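
Seen from the public API, the null branch above means that parsing null for an ordinary field clears it. A hedged sketch; the message type and JSON field name come from the Google.Protobuf test protos:

    using Google.Protobuf;
    using Google.Protobuf.TestProtos;

    // "singleInt32" is the JSON name of TestAllTypes.single_int32.
    var parsed = JsonParser.Default.Parse<TestAllTypes>("{ \"singleInt32\": null }");
    // MergeField saw the null token and cleared the field, so:
    // parsed.SingleInt32 == 0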
Example #3
        private void MergeRepeatedField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)
        {
            var token = tokenizer.Next();

            if (token.Type != JsonToken.TokenType.StartArray)
            {
                throw new InvalidProtocolBufferException("Repeated field value was not an array. Token type: " + token.Type);
            }

            IList list = (IList)field.Accessor.GetValue(message);

            while (true)
            {
                token = tokenizer.Next();
                if (token.Type == JsonToken.TokenType.EndArray)
                {
                    return;
                }
                tokenizer.PushBack(token);
                if (token.Type == JsonToken.TokenType.Null)
                {
                    throw new InvalidProtocolBufferException("Repeated field elements cannot be null");
                }
                list.Add(ParseSingleValue(field, tokenizer));
            }
        }
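
The loop reads one token ahead so it can stop at end-array, pushing the token back before rejecting nulls or handing off to ParseSingleValue. A sketch of both outcomes through the public parser (test proto names again, as an assumption):

    var ok = JsonParser.Default.Parse<TestAllTypes>("{ \"repeatedInt32\": [1, 2, 3] }");
    // ok.RepeatedInt32 now contains 1, 2, 3.

    // A null element trips the check above:
    // JsonParser.Default.Parse<TestAllTypes>("{ \"repeatedInt32\": [1, null] }")
    //     => InvalidProtocolBufferException("Repeated field elements cannot be null")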
Example #4
        private void MergeMapField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)
        {
            // Map fields are always objects, even if the values are well-known types: ParseSingleValue handles those.
            var token = tokenizer.Next();

            if (token.Type != JsonToken.TokenType.StartObject)
            {
                throw new InvalidProtocolBufferException("Expected an object to populate a map");
            }

            var type       = field.MessageType;
            var keyField   = type.FindFieldByNumber(1);
            var valueField = type.FindFieldByNumber(2);

            if (keyField == null || valueField == null)
            {
                throw new InvalidProtocolBufferException("Invalid map field: " + field.FullName);
            }
            IDictionary dictionary = (IDictionary)field.Accessor.GetValue(message);

            while (true)
            {
                token = tokenizer.Next();
                if (token.Type == JsonToken.TokenType.EndObject)
                {
                    return;
                }
                object key   = ParseMapKey(keyField, token.StringValue);
                object value = ParseSingleValue(valueField, tokenizer);
                // TODO: Null handling
                dictionary[key] = value;
            }
        }
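
The FindFieldByNumber(1)/FindFieldByNumber(2) lookups rely on the protobuf convention that every synthetic map-entry message stores the key as field 1 and the value as field 2. Note that JSON map keys are always strings, even for integer key types. A sketch using a map field from the test protos (names are an assumption):

    var map = JsonParser.Default.Parse<TestMap>("{ \"mapInt32Int32\": { \"1\": 10, \"2\": 20 } }");
    // map.MapInt32Int32[1] == 10 && map.MapInt32Int32[2] == 20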
Example #5
        private void MergeField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)
        {
            var token = tokenizer.Next();

            if (token.Type == JsonToken.TokenType.Null)
            {
                // Clear the field if we see a null token, unless it's for a singular field of type
                // google.protobuf.Value.
                // Note: different from Java API, which just ignores it.
                // TODO: Bring it more in line? Discuss...
                if (field.IsMap || field.IsRepeated || !IsGoogleProtobufValueField(field))
                {
                    field.Accessor.Clear(message);
                    return;
                }
            }
            tokenizer.PushBack(token);

            if (field.IsMap)
            {
                MergeMapField(message, field, tokenizer);
            }
            else if (field.IsRepeated)
            {
                MergeRepeatedField(message, field, tokenizer);
            }
            else
            {
                var value = ParseSingleValue(field, tokenizer);
                field.Accessor.SetValue(message, value);
            }
        }
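
The extra condition exists because JSON null is itself a legal google.protobuf.Value; clearing a singular Value field would lose that information. A quick sketch of the distinction:

    using Google.Protobuf.WellKnownTypes;

    // null parsed *as* a Value populates the null_value case instead of clearing:
    var v = JsonParser.Default.Parse<Value>("null");
    // v.KindCase == Value.KindOneofCase.NullValue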
Example #6
        private void MergeAny(IMessage message, JsonTokenizer tokenizer)
        {
            // Record the token stream until we see the @type property. At that point, we can take the value, consult
            // the type registry for the relevant message, and replay the stream, omitting the @type property.
            var tokens = new List<JsonToken>();

            var token = tokenizer.Next();

            if (token.Type != JsonToken.TokenType.StartObject)
            {
                throw new InvalidProtocolBufferException("Expected object value for Any");
            }
            int typeUrlObjectDepth = tokenizer.ObjectDepth;

            // The check for the property depth protects us from nested Any values which occur before the type URL
            // for *this* Any.
            while (token.Type != JsonToken.TokenType.Name ||
                   token.StringValue != JsonFormatter.AnyTypeUrlField ||
                   tokenizer.ObjectDepth != typeUrlObjectDepth)
            {
                tokens.Add(token);
                token = tokenizer.Next();
            }

            // Don't add the @type property or its value to the recorded token list
            token = tokenizer.Next();
            if (token.Type != JsonToken.TokenType.StringValue)
            {
                throw new InvalidProtocolBufferException("Expected string value for Any.@type");
            }
            string typeUrl  = token.StringValue;
            string typeName = JsonFormatter.GetTypeName(typeUrl);

            MessageDescriptor descriptor = settings.TypeRegistry.Find(typeName);

            if (descriptor == null)
            {
                throw new InvalidOperationException($"Type registry has no descriptor for type name '{typeName}'");
            }

            // Now replay the token stream we've already read and anything that remains of the object, just parsing it
            // as normal. Our original tokenizer should end up at the end of the object.
            var replay = JsonTokenizer.FromReplayedTokens(tokens, tokenizer);
            var body   = descriptor.Parser.CreateTemplate();

            if (descriptor.IsWellKnownType)
            {
                MergeWellKnownTypeAnyBody(body, replay);
            }
            else
            {
                Merge(body, replay);
            }
            var data = body.ToByteString();

            // Now that we have the message data, we can pack it into an Any (the message received as a parameter).
            message.Descriptor.Fields[Any.TypeUrlFieldNumber].Accessor.SetValue(message, typeUrl);
            message.Descriptor.Fields[Any.ValueFieldNumber].Accessor.SetValue(message, data);
        }
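
A usage sketch of the round trip this enables. Resolving the @type URL needs a parser configured with a TypeRegistry; the default parser has an empty registry, so the descriptor lookup above would fail:

    using Google.Protobuf;
    using Google.Protobuf.Reflection;
    using Google.Protobuf.WellKnownTypes;

    var registry = TypeRegistry.FromMessages(Duration.Descriptor);
    var settings = new JsonParser.Settings(JsonParser.Settings.Default.RecursionLimit, registry);
    var parser   = new JsonParser(settings);

    var any = parser.Parse<Any>(
        "{ \"@type\": \"type.googleapis.com/google.protobuf.Duration\", \"value\": \"5s\" }");
    // any.Unpack<Duration>().Seconds == 5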
Example #7
        public void NextAfterEndDocumentThrows()
        {
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null"));

            Assert.AreEqual(JsonToken.Null, tokenizer.Next());
            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
            Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
        }
Example #8
        public void SkipValue(string json)
        {
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json.Replace('\'', '"')));

            Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
            Assert.AreEqual("skip", tokenizer.Next().StringValue);
            tokenizer.SkipValue();
            Assert.AreEqual("next", tokenizer.Next().StringValue);
        }
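
The test body assumes a parameterized source; the real cases are not shown here, but hypothetical [TestCase] attributes of the following shape would exercise SkipValue over scalars, arrays, and nested objects alike (the apostrophes are rewritten to quotes by the Replace call):

    [TestCase("{ 'skip': 0, 'next': 1 }")]
    [TestCase("{ 'skip': [0, 1, 2], 'next': 1 }")]
    [TestCase("{ 'skip': { 'a': [ { 'b': 0 } ] }, 'next': 1 }")]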
Example #9
        public void CanPushBackEndDocument()
        {
            var tokenizer = new JsonTokenizer(new StringReader("null"));

            Assert.AreEqual(JsonToken.Null, tokenizer.Next());
            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
            tokenizer.PushBack(JsonToken.EndDocument);
            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
            Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
        }
Example #10
        /// <summary>
        /// Parses JSON read from <paramref name="jsonReader"/> and merges the information into the given message.
        /// </summary>
        /// <param name="message">The message to merge the JSON information into.</param>
        /// <param name="jsonReader">Reader providing the JSON to parse.</param>
        internal void Merge(IMessage message, TextReader jsonReader)
        {
            var tokenizer = JsonTokenizer.FromTextReader(jsonReader);

            Merge(message, tokenizer);
            var lastToken = tokenizer.Next();

            if (lastToken != JsonToken.EndDocument)
            {
                throw new InvalidProtocolBufferException("Expected end of JSON after object");
            }
        }
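
For reference, the public Parse entry points funnel into this method. A typical call (Duration uses the compact string form defined for well-known types):

    var duration = JsonParser.Default.Parse<Duration>("\"3.5s\"");
    // duration.Seconds == 3 && duration.Nanos == 500000000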
Example #11
        public void InvalidStructure(string json, int expectedValidTokens)
        {
            // Note: we don't test that the earlier tokens are exactly as expected,
            // partly because that's hard to parameterize.
            var reader    = new StringReader(json.Replace('\'', '"'));
            var tokenizer = JsonTokenizer.FromTextReader(reader);

            for (int i = 0; i < expectedValidTokens; i++)
            {
                Assert.IsNotNull(tokenizer.Next());
            }
            Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
        }
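
As with SkipValue above, the cases come from parameterized attributes that are not shown here; hypothetical examples of the shape they would take:

    [TestCase("[10, 20", 3)]   // start-array, 10, 20 are valid; EOF mid-array then throws
    [TestCase("{ 'foo' }", 2)] // start-object and name are valid; '}' where ':' is required then throws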
Example #12
        /// <summary>
        /// Merges the given message using data from the given tokenizer. In most cases, the next
        /// token should be a "start object" token, but wrapper types and nullity can invalidate
        /// that assumption. This is implemented as an LL(1) recursive descent parser over the stream
        /// of tokens provided by the tokenizer. This token stream is assumed to be valid JSON, with the
        /// tokenizer performing that validation - but not every token stream is valid "protobuf JSON".
        /// </summary>
        private void Merge(IMessage message, JsonTokenizer tokenizer)
        {
            if (message.Descriptor.IsWellKnownType)
            {
                Action<JsonParser, IMessage, JsonTokenizer> handler;
                if (WellKnownTypeHandlers.TryGetValue(message.Descriptor.FullName, out handler))
                {
                    handler(this, message, tokenizer);
                    return;
                }
                // Well-known types with no special handling continue in the normal way.
            }
            var token = tokenizer.Next();

            if (token.Type != JsonToken.TokenType.StartObject)
            {
                throw new InvalidProtocolBufferException("Expected an object");
            }
            var descriptor = message.Descriptor;
            // TODO: Make this more efficient, e.g. by building it once in the descriptor.
            // Additionally, we need to consider whether to parse field names in their original proto form,
            // and any overrides in the descriptor. But yes, all of this should be in the descriptor somehow...
            // the descriptor can expose the dictionary.
            var jsonFieldMap = descriptor.Fields.InDeclarationOrder().ToDictionary(field => JsonFormatter.ToCamelCase(field.Name));

            while (true)
            {
                token = tokenizer.Next();
                if (token.Type == JsonToken.TokenType.EndObject)
                {
                    return;
                }
                if (token.Type != JsonToken.TokenType.Name)
                {
                    throw new InvalidOperationException("Unexpected token type " + token.Type);
                }
                string          name = token.StringValue;
                FieldDescriptor field;
                if (jsonFieldMap.TryGetValue(name, out field))
                {
                    MergeField(message, field, tokenizer);
                }
                else
                {
                    // TODO: Is this what we want to do? If not, we'll need to skip the value,
                    // which may be an object or array. (We might want to put code in the tokenizer
                    // to do that.)
                    throw new InvalidProtocolBufferException("Unknown field: " + name);
                }
            }
        }
Example #13
        /// <summary>
        /// Merges the given message using data from the given tokenizer. In most cases, the next
        /// token should be a "start object" token, but wrapper types and nullity can invalidate
        /// that assumption. This is implemented as an LL(1) recursive descent parser over the stream
        /// of tokens provided by the tokenizer. This token stream is assumed to be valid JSON, with the
        /// tokenizer performing that validation - but not every token stream is valid "protobuf JSON".
        /// </summary>
        private void Merge(IMessage message, JsonTokenizer tokenizer)
        {
            if (tokenizer.ObjectDepth > settings.RecursionLimit)
            {
                throw InvalidProtocolBufferException.JsonRecursionLimitExceeded();
            }
            if (message.Descriptor.IsWellKnownType)
            {
                Action<JsonParser, IMessage, JsonTokenizer> handler;
                if (WellKnownTypeHandlers.TryGetValue(message.Descriptor.FullName, out handler))
                {
                    handler(this, message, tokenizer);
                    return;
                }
                // Well-known types with no special handling continue in the normal way.
            }
            var token = tokenizer.Next();

            if (token.Type != JsonToken.TokenType.StartObject)
            {
                throw new InvalidProtocolBufferException("Expected an object");
            }
            var descriptor   = message.Descriptor;
            var jsonFieldMap = descriptor.Fields.ByJsonName();

            while (true)
            {
                token = tokenizer.Next();
                if (token.Type == JsonToken.TokenType.EndObject)
                {
                    return;
                }
                if (token.Type != JsonToken.TokenType.Name)
                {
                    throw new InvalidOperationException("Unexpected token type " + token.Type);
                }
                string          name = token.StringValue;
                FieldDescriptor field;
                if (jsonFieldMap.TryGetValue(name, out field))
                {
                    MergeField(message, field, tokenizer);
                }
                else
                {
                    // TODO: Is this what we want to do? If not, we'll need to skip the value,
                    // which may be an object or array. (We might want to put code in the tokenizer
                    // to do that.)
                    throw new InvalidProtocolBufferException("Unknown field: " + name);
                }
            }
        }
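
A sketch of the guard in action: every nested object bumps ObjectDepth, so a sufficiently deep document fails fast. The default limit mirrors CodedInputStream's recursion limit (100 at the time of writing; treat that as an assumption), and TestRecursiveMessage is the self-referential message from the test protos:

    using System.Linq;

    string deep = string.Concat(Enumerable.Repeat("{ \"a\": ", 200))
                  + "{}" + new string('}', 200);
    // JsonParser.Default.Parse<TestRecursiveMessage>(deep)
    //     => InvalidProtocolBufferException (recursion limit exceeded)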
Example #14
        private void MergeStruct(IMessage message, JsonTokenizer tokenizer)
        {
            var token = tokenizer.Next();

            if (token.Type != JsonToken.TokenType.StartObject)
            {
                throw new InvalidProtocolBufferException("Expected object value for Struct");
            }
            tokenizer.PushBack(token);

            var field = message.Descriptor.Fields[Struct.FieldsFieldNumber];

            MergeMapField(message, field, tokenizer);
        }
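
Because a Struct is just a map<string, Value> in field 1, the whole method reduces to pushing back the start-object and delegating to the map logic. Usage sketch:

    var s = JsonParser.Default.Parse<Struct>("{ \"name\": \"x\", \"count\": 2 }");
    // s.Fields["name"].StringValue == "x" && s.Fields["count"].NumberValue == 2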
Example #15
        private void MergeStructValue(IMessage message, JsonTokenizer tokenizer)
        {
            var firstToken = tokenizer.Next();
            var fields     = message.Descriptor.Fields;

            switch (firstToken.Type)
            {
            case JsonToken.TokenType.Null:
                fields[Value.NullValueFieldNumber].Accessor.SetValue(message, 0);
                return;

            case JsonToken.TokenType.StringValue:
                fields[Value.StringValueFieldNumber].Accessor.SetValue(message, firstToken.StringValue);
                return;

            case JsonToken.TokenType.Number:
                fields[Value.NumberValueFieldNumber].Accessor.SetValue(message, firstToken.NumberValue);
                return;

            case JsonToken.TokenType.False:
            case JsonToken.TokenType.True:
                fields[Value.BoolValueFieldNumber].Accessor.SetValue(message, firstToken.Type == JsonToken.TokenType.True);
                return;

            case JsonToken.TokenType.StartObject:
            {
                var field         = fields[Value.StructValueFieldNumber];
                var structMessage = NewMessageForField(field);
                tokenizer.PushBack(firstToken);
                Merge(structMessage, tokenizer);
                field.Accessor.SetValue(message, structMessage);
                return;
            }

            case JsonToken.TokenType.StartArray:
            {
                var field = fields[Value.ListValueFieldNumber];
                var list  = NewMessageForField(field);
                tokenizer.PushBack(firstToken);
                Merge(list, tokenizer);
                field.Accessor.SetValue(message, list);
                return;
            }

            default:
                throw new InvalidOperationException("Unexpected token type: " + firstToken.Type);
            }
        }
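
Since Value is a well-known type with its own handler, this is also what makes top-level scalars and arrays parseable as messages:

    var v = JsonParser.Default.Parse<Value>("[1, \"two\", true]");
    // v.KindCase == Value.KindOneofCase.ListValue, with three elements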
Example #16
        private static void AssertThrowsAfter(string json, params JsonToken[] expectedTokens)
        {
            var reader    = new StringReader(json);
            var tokenizer = JsonTokenizer.FromTextReader(reader);

            for (int i = 0; i < expectedTokens.Length; i++)
            {
                var actualToken = tokenizer.Next();
                if (actualToken == JsonToken.EndDocument)
                {
                    Assert.Fail("Expected {0} but reached end of document", expectedTokens[i]);
                }
                Assert.AreEqual(expectedTokens[i], actualToken);
            }
            Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
        }
Example #17
        // Well-known types end up in a property called "value" in the JSON. As there's no longer a @type property
        // in the given JSON token stream, we should *only* have tokens of start-object, name("value"), the value
        // itself, and then end-object.
        private void MergeWellKnownTypeAnyBody(IMessage body, JsonTokenizer tokenizer)
        {
            var token = tokenizer.Next(); // Definitely start-object; checked in previous method

            token = tokenizer.Next();
            // TODO: What about an absent Int32Value, for example?
            if (token.Type != JsonToken.TokenType.Name || token.StringValue != JsonFormatter.AnyWellKnownTypeValueField)
            {
                throw new InvalidProtocolBufferException($"Expected '{JsonFormatter.AnyWellKnownTypeValueField}' property for well-known type Any body");
            }
            Merge(body, tokenizer);
            token = tokenizer.Next();
            if (token.Type != JsonToken.TokenType.EndObject)
            {
                throw new InvalidProtocolBufferException($"Expected end-object token after @type/value for well-known type");
            }
        }
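
Concretely (an illustrative shape, not taken from a test): for a Duration inside an Any, the original JSON and the token stream seen by this method would be:

    // Original Any JSON:
    //   { "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.5s" }
    // Tokens remaining after Example #6 strips @type:
    //   StartObject, Name("value"), StringValue("1.5s"), EndObject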
Example #18
        /// <summary>
        /// Asserts that the specified JSON is tokenized into the given sequence of tokens.
        /// Unlike <see cref="AssertTokens(string, JsonToken[])"/>, this does not perform any character
        /// replacement on the specified JSON, and should be used when the text contains apostrophes which
        /// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of
        /// expected tokens, but is implicit.
        /// </summary>
        private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens)
        {
            var reader    = new StringReader(json);
            var tokenizer = JsonTokenizer.FromTextReader(reader);

            for (int i = 0; i < expectedTokens.Length; i++)
            {
                var actualToken = tokenizer.Next();
                if (actualToken == JsonToken.EndDocument)
                {
                    Assert.Fail("Expected {0} but reached end of token stream", expectedTokens[i]);
                }
                Assert.AreEqual(expectedTokens[i], actualToken);
            }
            var finalToken = tokenizer.Next();

            if (finalToken != JsonToken.EndDocument)
            {
                Assert.Fail("Expected token stream to be exhausted; received {0}", finalToken);
            }
        }
Example #19
        public void ObjectDepth_WithPushBack()
        {
            string json      = "{}";
            var    tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));

            Assert.AreEqual(0, tokenizer.ObjectDepth);
            var token = tokenizer.Next();

            Assert.AreEqual(1, tokenizer.ObjectDepth);
            // When we push back a "start object", we should effectively be back to the previous depth.
            tokenizer.PushBack(token);
            Assert.AreEqual(0, tokenizer.ObjectDepth);
            // Read the same token again, and get back to depth 1
            token = tokenizer.Next();
            Assert.AreEqual(1, tokenizer.ObjectDepth);

            // Now the same in reverse, with EndObject
            token = tokenizer.Next();
            Assert.AreEqual(0, tokenizer.ObjectDepth);
            tokenizer.PushBack(token);
            Assert.AreEqual(1, tokenizer.ObjectDepth);
            tokenizer.Next();
            Assert.AreEqual(0, tokenizer.ObjectDepth);
        }
Example #20
        private object ParseSingleValue(FieldDescriptor field, JsonTokenizer tokenizer)
        {
            var token = tokenizer.Next();

            if (token.Type == JsonToken.TokenType.Null)
            {
                if (field.FieldType == FieldType.Message && field.MessageType.FullName == Value.Descriptor.FullName)
                {
                    return new Value { NullValue = NullValue.NULL_VALUE };
                }
                return null;
            }

            var fieldType = field.FieldType;

            if (fieldType == FieldType.Message)
            {
                // Parse wrapper types as their constituent types.
                // TODO: What does this mean for null?
                if (field.MessageType.IsWrapperType)
                {
                    field     = field.MessageType.Fields[WrappersReflection.WrapperValueFieldNumber];
                    fieldType = field.FieldType;
                }
                else
                {
                    // TODO: Merge the current value in message? (Public API currently doesn't make this relevant as we don't expose merging.)
                    tokenizer.PushBack(token);
                    IMessage subMessage = NewMessageForField(field);
                    Merge(subMessage, tokenizer);
                    return subMessage;
                }
            }

            switch (token.Type)
            {
            case JsonToken.TokenType.True:
            case JsonToken.TokenType.False:
                if (fieldType == FieldType.Bool)
                {
                    return token.Type == JsonToken.TokenType.True;
                }
                // Fall through to "we don't support this type for this case"; could duplicate the behaviour of the default
                // case instead, but this way we'd only need to change one place.
                goto default;

            case JsonToken.TokenType.StringValue:
                return ParseSingleStringValue(field, token.StringValue);

            // Note: not passing the number value itself here, as we may end up storing the string value in the token too.
            case JsonToken.TokenType.Number:
                return ParseSingleNumberValue(field, token);

            case JsonToken.TokenType.Null:
                throw new NotImplementedException("Haven't worked out what to do for null yet");

            default:
                throw new InvalidProtocolBufferException("Unsupported JSON token type " + token.Type + " for field type " + fieldType);
            }
        }
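
The wrapper rewrite at the top of the message branch is what lets wrapper-typed fields accept their constituent JSON directly. A sketch, assuming the TestWellKnownTypes message from the test protos (its int32_field is a google.protobuf.Int32Value, surfaced in C# as a nullable int):

    var m = JsonParser.Default.Parse<TestWellKnownTypes>("{ \"int32Field\": 32 }");
    // m.Int32Field == 32; a JSON null here would have cleared it back to null.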
Example #21
 internal JsonReplayTokenizer(List<JsonToken> tokens, JsonTokenizer nextTokenizer)
 {
     this.tokens        = tokens;
     this.nextTokenizer = nextTokenizer;
 }
Example #22
 /// <summary>
 /// Creates a tokenizer that first replays the given list of tokens, then continues reading
 /// from another tokenizer. Note that if the returned tokenizer is "pushed back", that does not push back
 /// on the continuation tokenizer, or vice versa. Care should be taken when using this method - it was
 /// created for the sake of Any parsing.
 /// </summary>
 internal static JsonTokenizer FromReplayedTokens(List<JsonToken> tokens, JsonTokenizer continuation)
 {
     return new JsonReplayTokenizer(tokens, continuation);
 }
Example #23
        /// <summary>
        /// Merges the given message using data from the given tokenizer. In most cases, the next
        /// token should be a "start object" token, but wrapper types and nullity can invalidate
        /// that assumption. This is implemented as an LL(1) recursive descent parser over the stream
        /// of tokens provided by the tokenizer. This token stream is assumed to be valid JSON, with the
        /// tokenizer performing that validation - but not every token stream is valid "protobuf JSON".
        /// </summary>
        private void Merge(IMessage message, JsonTokenizer tokenizer)
        {
            if (tokenizer.ObjectDepth > settings.RecursionLimit)
            {
                throw InvalidProtocolBufferException.JsonRecursionLimitExceeded();
            }
            if (message.Descriptor.IsWellKnownType)
            {
                Action<JsonParser, IMessage, JsonTokenizer> handler;
                if (WellKnownTypeHandlers.TryGetValue(message.Descriptor.FullName, out handler))
                {
                    handler(this, message, tokenizer);
                    return;
                }
                // Well-known types with no special handling continue in the normal way.
            }
            var token = tokenizer.Next();

            if (token.Type != JsonToken.TokenType.StartObject)
            {
                throw new InvalidProtocolBufferException("Expected an object");
            }
            var descriptor   = message.Descriptor;
            var jsonFieldMap = descriptor.Fields.ByJsonName();
            // All the oneof fields we've already accounted for - we can only see each of them once.
            // The set is created lazily to avoid the overhead of creating a set for every message
            // we parsed, when oneofs are relatively rare.
            HashSet<OneofDescriptor> seenOneofs = null;

            while (true)
            {
                token = tokenizer.Next();
                if (token.Type == JsonToken.TokenType.EndObject)
                {
                    return;
                }
                if (token.Type != JsonToken.TokenType.Name)
                {
                    throw new InvalidOperationException("Unexpected token type " + token.Type);
                }
                string          name = token.StringValue;
                FieldDescriptor field;
                if (jsonFieldMap.TryGetValue(name, out field))
                {
                    if (field.ContainingOneof != null)
                    {
                        if (seenOneofs == null)
                        {
                            seenOneofs = new HashSet<OneofDescriptor>();
                        }
                        if (!seenOneofs.Add(field.ContainingOneof))
                        {
                            throw new InvalidProtocolBufferException($"Multiple values specified for oneof {field.ContainingOneof.Name}");
                        }
                    }
                    MergeField(message, field, tokenizer);
                }
                else
                {
                    // TODO: Is this what we want to do? If not, we'll need to skip the value,
                    // which may be an object or array. (We might want to put code in the tokenizer
                    // to do that.)
                    throw new InvalidProtocolBufferException("Unknown field: " + name);
                }
            }
        }
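
A sketch of the oneof guard, using the oneof from the test protos (TestAllTypes.oneof_field with members such as oneof_string and oneof_uint32; names are an assumption):

    var one = JsonParser.Default.Parse<TestAllTypes>("{ \"oneofString\": \"x\" }");
    // one.OneofFieldCase == TestAllTypes.OneofFieldOneofCase.OneofString

    // Two members of the same oneof in one object trips the check above:
    // "{ \"oneofString\": \"x\", \"oneofUint32\": 5 }" => InvalidProtocolBufferException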
Example #24
 /// <summary>
 /// Creates a tokenizer that first replays the given list of tokens, then continues reading
 /// from another tokenizer. Note that if the returned tokenizer is "pushed back", that does not push back
 /// on the continuation tokenizer, or vice versa. Care should be taken when using this method - it was
 /// created for the sake of Any parsing.
 /// </summary>
 internal static JsonTokenizer FromReplayedTokens(IList<JsonToken> tokens, JsonTokenizer continuation)
 {
     return new JsonReplayTokenizer(tokens, continuation);
 }
Example #25
 internal JsonReplayTokenizer(IList<JsonToken> tokens, JsonTokenizer nextTokenizer)
 {
     this.tokens = tokens;
     this.nextTokenizer = nextTokenizer;
 }
Example #26
 private static void AssertThrowsAfter(string json, params JsonToken[] expectedTokens)
 {
     var reader = new StringReader(json);
     var tokenizer = new JsonTokenizer(reader);
     for (int i = 0; i < expectedTokens.Length; i++)
     {
         var actualToken = tokenizer.Next();
         if (actualToken == JsonToken.EndDocument)
         {
             Assert.Fail("Expected {0} but reached end of document", expectedTokens[i]);
         }
         Assert.AreEqual(expectedTokens[i], actualToken);
     }
     Assert.Throws<InvalidProtocolBufferException>(() => tokenizer.Next());
 }
Example #27
 /// <summary>
 /// Asserts that the specified JSON is tokenized into the given sequence of tokens.
 /// Unlike <see cref="AssertTokens(string, JsonToken[])"/>, this does not perform any character
 /// replacement on the specified JSON, and should be used when the text contains apostrophes which
 /// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of 
 /// expected tokens, but is implicit.
 /// </summary>
 private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens)
 {
     var reader = new StringReader(json);
     var tokenizer = new JsonTokenizer(reader);
     for (int i = 0; i < expectedTokens.Length; i++)
     {
         var actualToken = tokenizer.Next();
         if (actualToken == JsonToken.EndDocument)
         {
             Assert.Fail("Expected {0} but reached end of token stream", expectedTokens[i]);
         }
         Assert.AreEqual(expectedTokens[i], actualToken);
     }
     var finalToken = tokenizer.Next();
     if (finalToken != JsonToken.EndDocument)
     {
         Assert.Fail("Expected token stream to be exhausted; received {0}", finalToken);
     }
 }
Example #28
 public void InvalidStructure(string json, int expectedValidTokens)
 {
     // Note: we don't test that the earlier tokens are exactly as expected,
     // partly because that's hard to parameterize.
     var reader = new StringReader(json.Replace('\'', '"'));
     var tokenizer = new JsonTokenizer(reader);
     for (int i = 0; i < expectedValidTokens; i++)
     {
         Assert.IsNotNull(tokenizer.Next());
     }
     Assert.Throws<InvalidProtocolBufferException>(() => tokenizer.Next());
 }
Example #29
 public void ObjectDepth()
 {
     string json = "{ \"foo\": { \"x\": 1, \"y\": [ 0 ] } }";
     var tokenizer = new JsonTokenizer(new StringReader(json));
     // If we had more tests like this, I'd introduce a helper method... but for one test, it's not worth it.
     Assert.AreEqual(0, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
     Assert.AreEqual(1, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.Name("foo"), tokenizer.Next());
     Assert.AreEqual(1, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
     Assert.AreEqual(2, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.Name("x"), tokenizer.Next());
     Assert.AreEqual(2, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.Value(1), tokenizer.Next());
     Assert.AreEqual(2, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.Name("y"), tokenizer.Next());
     Assert.AreEqual(2, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.StartArray, tokenizer.Next());
     Assert.AreEqual(2, tokenizer.ObjectDepth); // Depth hasn't changed in array
     Assert.AreEqual(JsonToken.Value(0), tokenizer.Next());
     Assert.AreEqual(2, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.EndArray, tokenizer.Next());
     Assert.AreEqual(2, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.EndObject, tokenizer.Next());
     Assert.AreEqual(1, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.EndObject, tokenizer.Next());
     Assert.AreEqual(0, tokenizer.ObjectDepth);
     Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
     Assert.AreEqual(0, tokenizer.ObjectDepth);
 }
Example #30
 public JsonReplayTokenizer(IList<JsonToken> tokens, JsonTokenizer nextTokenizer)
 {
     this.tokens        = tokens;
     this.nextTokenizer = nextTokenizer;
 }
Example #31
 // Convenience method to avoid having to repeat the same code multiple times in the above
 // dictionary initialization.
 private static void MergeWrapperField(JsonParser parser, IMessage message, JsonTokenizer tokenizer)
 {
     parser.MergeField(message, message.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber], tokenizer);
 }
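
A hedged sketch of the dictionary initialization that comment refers to (entries abridged and illustrative; the real map covers every well-known type the parser special-cases):

    private static readonly Dictionary<string, Action<JsonParser, IMessage, JsonTokenizer>>
        WellKnownTypeHandlers = new Dictionary<string, Action<JsonParser, IMessage, JsonTokenizer>>
        {
            { Int32Value.Descriptor.FullName, MergeWrapperField },
            { StringValue.Descriptor.FullName, MergeWrapperField },
            { Struct.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeStruct(message, tokenizer) }
        };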
Example #32
        public void ObjectDepth_WithPushBack()
        {
            string json = "{}";
            var tokenizer = new JsonTokenizer(new StringReader(json));
            Assert.AreEqual(0, tokenizer.ObjectDepth);
            var token = tokenizer.Next();
            Assert.AreEqual(1, tokenizer.ObjectDepth);
            // When we push back a "start object", we should effectively be back to the previous depth.
            tokenizer.PushBack(token);
            Assert.AreEqual(0, tokenizer.ObjectDepth);
            // Read the same token again, and get back to depth 1
            token = tokenizer.Next();
            Assert.AreEqual(1, tokenizer.ObjectDepth);

            // Now the same in reverse, with EndObject
            token = tokenizer.Next();
            Assert.AreEqual(0, tokenizer.ObjectDepth);
            tokenizer.PushBack(token);
            Assert.AreEqual(1, tokenizer.ObjectDepth);
            tokenizer.Next();
            Assert.AreEqual(0, tokenizer.ObjectDepth);
        }