/// <summary>
/// Parses a numeric value from a JSON string via <paramref name="parser"/>, first enforcing
/// the protobuf JSON rules that plain .NET parsing is too lenient about: no leading '+'
/// and no insignificant leading zeroes.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The text is not a valid numeric value,
/// or is out of range for the target type.</exception>
private static T ParseNumericString<T>(string text, Func<string, NumberStyles, IFormatProvider, T> parser)
{
    // NumberStyles has no way to prohibit a leading plus sign, so reject it up front.
    if (text.StartsWith("+"))
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Invalid numeric value: {text}");
    }
    // Reject insignificant leading zeroes such as "01" or "-01" (also not expressible via NumberStyles).
    bool positiveLeadingZero = text.StartsWith("0") && text.Length > 1 && text[1] >= '0' && text[1] <= '9';
    bool negativeLeadingZero = text.StartsWith("-0") && text.Length > 2 && text[2] >= '0' && text[2] <= '9';
    if (positiveLeadingZero || negativeLeadingZero)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Invalid numeric value: {text}");
    }
    try
    {
        // Invariant culture: JSON numbers are machine-readable, never localized.
        return parser(text, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture);
    }
    catch (FormatException)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Invalid numeric value for type: {text}");
    }
    catch (OverflowException)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Value out of range: {text}");
    }
}
/// <summary>
/// Skips the data for the field whose tag was just read.
/// Call this directly after <see cref="ReadTag"/> when the field is unknown and should be discarded.
/// </summary>
/// <remarks>
/// An end-group tag makes this method throw <see cref="InvalidProtocolBufferException"/>:
/// callers skipping a whole group should invoke this after reading the start-group tag, so an
/// unpaired end-group tag is correctly reported as an error rather than silently consumed.
/// </remarks>
/// <exception cref="InvalidProtocolBufferException">The last tag was an end-group tag</exception>
/// <exception cref="InvalidOperationException">The last read operation read to the end of the logical stream</exception>
public void SkipLastField()
{
    if (lastTag == 0)
    {
        throw new InvalidOperationException("SkipLastField cannot be called at the end of a stream");
    }
    switch (WireFormat.GetTagWireType(lastTag))
    {
        case WireFormat.WireType.Varint:
            // Discard the varint payload.
            ReadRawVarint32();
            break;
        case WireFormat.WireType.Fixed32:
            ReadFixed32();
            break;
        case WireFormat.WireType.Fixed64:
            ReadFixed64();
            break;
        case WireFormat.WireType.LengthDelimited:
            // Length prefix tells us exactly how many raw bytes to discard.
            var length = ReadLength();
            SkipRawBytes(length);
            break;
        case WireFormat.WireType.StartGroup:
            // Recursively consume everything up to the matching end-group tag.
            SkipGroup(lastTag);
            break;
        case WireFormat.WireType.EndGroup:
            throw InvalidProtocolBufferException.OnThrowMessage(
                "SkipLastField called on an end-group tag, indicating that the corresponding start-group was missing");
    }
}
/// <summary>
/// Parses a JSON array and appends each element to the given repeated field of <paramref name="message"/>.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The value is not an array, or contains a null element.</exception>
private void MergeRepeatedField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)
{
    var startToken = tokenizer.Next();
    if (startToken.Type != JsonToken.TokenType.StartArray)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Repeated field value was not an array. Token type: " + startToken.Type);
    }
    IList values = (IList)field.Accessor.GetValue(message);
    for (;;)
    {
        var elementToken = tokenizer.Next();
        if (elementToken.Type == JsonToken.TokenType.EndArray)
        {
            return;
        }
        // Push the token back so ParseSingleValue sees the full element.
        tokenizer.PushBack(elementToken);
        // Null is explicitly disallowed inside repeated fields by the proto3 JSON mapping.
        if (elementToken.Type == JsonToken.TokenType.Null)
        {
            throw InvalidProtocolBufferException.OnThrowMessage("Repeated field elements cannot be null");
        }
        values.Add(ParseSingleValue(field, tokenizer));
    }
}
/// <summary>
/// Converts a JSON string value into the CLR value appropriate for the given field type.
/// Numeric field types accept their values as strings (per the proto3 JSON mapping);
/// enums are matched by name and returned as their numeric value.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The text cannot be converted to the field's type.</exception>
private static object ParseSingleStringValue(FieldDescriptor field, string text)
{
    switch (field.FieldType)
    {
        case FieldType.String:
            return text;
        case FieldType.Bytes:
            try
            {
                return ByteString.FromBase64(text);
            }
            catch (FormatException e)
            {
                throw InvalidProtocolBufferException.InvalidBase64(e);
            }
        case FieldType.Int32:
        case FieldType.SInt32:
        case FieldType.SFixed32:
            return ParseNumericString(text, int.Parse);
        case FieldType.UInt32:
        case FieldType.Fixed32:
            return ParseNumericString(text, uint.Parse);
        case FieldType.Int64:
        case FieldType.SInt64:
        case FieldType.SFixed64:
            return ParseNumericString(text, long.Parse);
        case FieldType.UInt64:
        case FieldType.Fixed64:
            return ParseNumericString(text, ulong.Parse);
        case FieldType.Double:
            double doubleValue = ParseNumericString(text, double.Parse);
            // Guard against lenient parsing producing infinity/NaN from text other than the canonical spellings.
            ValidateInfinityAndNan(text, double.IsPositiveInfinity(doubleValue), double.IsNegativeInfinity(doubleValue), double.IsNaN(doubleValue));
            return doubleValue;
        case FieldType.Float:
            float floatValue = ParseNumericString(text, float.Parse);
            ValidateInfinityAndNan(text, float.IsPositiveInfinity(floatValue), float.IsNegativeInfinity(floatValue), float.IsNaN(floatValue));
            return floatValue;
        case FieldType.Enum:
            var enumValue = field.EnumType.FindValueByName(text);
            if (enumValue == null)
            {
                throw InvalidProtocolBufferException.OnThrowMessage($"Invalid enum value: {text} for enum type: {field.EnumType.FullName}");
            }
            // Just return it as an int, and let the CLR convert it.
            return enumValue.Number;
        default:
            throw InvalidProtocolBufferException.OnThrowMessage($"Unsupported conversion from JSON string for field type {field.FieldType}");
    }
}
/// <summary>
/// Verifies that an infinite/NaN parse result came from the exact canonical text
/// ("Infinity", "-Infinity", "NaN"). This compensates for the lenient whitespace handling
/// of double.Parse/float.Parse, and for Mono parsing out-of-range values as infinity.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The special value did not come from its canonical spelling.</exception>
private static void ValidateInfinityAndNan(string text, bool isPositiveInfinity, bool isNegativeInfinity, bool isNaN)
{
    bool mismatched =
        (isPositiveInfinity && text != "Infinity") ||
        (isNegativeInfinity && text != "-Infinity") ||
        (isNaN && text != "NaN");
    if (mismatched)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Invalid numeric value: {text}");
    }
}
/// <summary>
/// Ensures that a JSON number intended for an integer field is finite and has no
/// fractional part; throws otherwise.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The value is infinite, NaN, or non-integral.</exception>
private static void CheckInteger(double value)
{
    // Infinity/NaN and fractional values all fail the same way; Math.Floor compares equal
    // only for finite integral doubles.
    if (double.IsInfinity(value) || double.IsNaN(value) || value != Math.Floor(value))
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Value not an integer: {value}");
    }
}
/// <summary>
/// Parses JSON read from <paramref name="jsonReader"/> and merges the information into the given message,
/// rejecting any trailing content after the top-level value.
/// </summary>
/// <param name="message">The message to merge the JSON information into.</param>
/// <param name="jsonReader">Reader providing the JSON to parse.</param>
internal void Merge(IMessage message, TextReader jsonReader)
{
    var tokenizer = JsonTokenizer.FromTextReader(jsonReader);
    Merge(message, tokenizer);
    // After the top-level value there must be nothing left but end-of-document.
    var trailingToken = tokenizer.Next();
    if (trailingToken != JsonToken.EndDocument)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected end of JSON after object");
    }
}
/// <summary>
/// Parses a single field from <paramref name="input"/> (whose tag has already been read)
/// and merges it into this set as an unknown field of the appropriate wire type.
/// </summary>
/// <param name="input">The coded input stream containing the field</param>
/// <exception cref="InvalidProtocolBufferException">The tag is an end-group tag with no matching start-group.</exception>
private void MergeFieldFrom(CodedInputStream input)
{
    uint tag = input.LastTag;
    int fieldNumber = WireFormat.GetTagFieldNumber(tag);
    switch (WireFormat.GetTagWireType(tag))
    {
        case WireFormat.WireType.Varint:
            GetOrAddField(fieldNumber).AddVarint(input.ReadUInt64());
            return;
        case WireFormat.WireType.Fixed32:
            GetOrAddField(fieldNumber).AddFixed32(input.ReadFixed32());
            return;
        case WireFormat.WireType.Fixed64:
            GetOrAddField(fieldNumber).AddFixed64(input.ReadFixed64());
            return;
        case WireFormat.WireType.LengthDelimited:
            GetOrAddField(fieldNumber).AddLengthDelimited(input.ReadBytes());
            return;
        case WireFormat.WireType.StartGroup:
            // Groups are not retained as unknown fields; consume through the matching end-group tag.
            input.SkipGroup(tag);
            return;
        case WireFormat.WireType.EndGroup:
            throw InvalidProtocolBufferException.OnThrowMessage("Merge an unknown field of end-group tag, indicating that the corresponding start-group was missing.");
        default:
            throw new InvalidOperationException("Wire Type is invalid.");
    }
}
/// <summary>
/// Merges a JSON object into a well-known <c>Struct</c> message by delegating to the
/// map-field handling of its <c>fields</c> map.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The value is not a JSON object.</exception>
private void MergeStruct(IMessage message, JsonTokenizer tokenizer)
{
    var startToken = tokenizer.Next();
    if (startToken.Type != JsonToken.TokenType.StartObject)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected object value for Struct");
    }
    // Put the start-object token back so MergeMapField can consume the whole object.
    tokenizer.PushBack(startToken);
    var fieldsField = message.Descriptor.Fields[Struct.FieldsFieldNumber];
    MergeMapField(message, fieldsField, tokenizer);
}
/// <summary>
/// Merges a JSON string into a well-known <c>FieldMask</c> message: the string is a
/// comma-separated list of camelCase paths, each converted to snake_case for storage.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The value is not a string.</exception>
private static void MergeFieldMask(IMessage message, JsonToken token)
{
    if (token.Type != JsonToken.TokenType.StringValue)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected string value for FieldMask");
    }
    // TODO: Do we *want* to remove empty entries? Probably okay to treat "" as "no paths", but "foo,,bar"?
    string[] camelCasePaths = token.StringValue.Split(FieldMaskPathSeparators, StringSplitOptions.RemoveEmptyEntries);
    IList storedPaths = (IList)message.Descriptor.Fields[FieldMask.PathsFieldNumber].Accessor.GetValue(message);
    foreach (var camelPath in camelCasePaths)
    {
        storedPaths.Add(ToSnakeCase(camelPath));
    }
}
// Ported from src/google/protobuf/util/internal/utility.cc
/// <summary>
/// Converts a camelCase field-mask path to snake_case, throwing if the input already
/// contains an underscore (which would make the round trip ambiguous).
/// </summary>
private static string ToSnakeCase(string text)
{
    var builder = new StringBuilder(text.Length * 2);
    // Note: this is probably unnecessary now, but currently retained to be as close as possible to the
    // C++, whilst still throwing an exception on underscores.
    bool wasNotUnderscore = false; // Initialize to false for case 1 (below)
    bool wasNotCap = false;

    for (int i = 0; i < text.Length; i++)
    {
        char c = text[i];
        if (c >= 'A' && c <= 'Z') // ascii_isupper
        {
            // Consider when the current character B is capitalized:
            //    1) At beginning of input:   "B..." => "b..."
            //       (e.g. "Biscuit" => "biscuit")
            //    2) Following a lowercase:   "...aB..." => "...a_b..."
            //       (e.g. "gBike" => "g_bike")
            //    3) At the end of input:     "...AB" => "...ab"
            //       (e.g. "GoogleLAB" => "google_lab")
            //    4) Followed by a lowercase: "...ABc..." => "...a_bc..."
            //       (e.g. "GBike" => "g_bike")
            if (wasNotUnderscore &&               // case 1 out
                (wasNotCap ||                     // case 2 in, case 3 out
                 (i + 1 < text.Length &&          // case 3 out
                  (text[i + 1] >= 'a' && text[i + 1] <= 'z')))) // ascii_islower(text[i + 1])
            {
                // We add an underscore for case 2 and case 4.
                builder.Append('_');
            }
            // ascii_tolower, but we already know that c *is* an upper case ASCII character...
            builder.Append((char)(c + 'a' - 'A'));
            wasNotUnderscore = true;
            wasNotCap = false;
        }
        else
        {
            builder.Append(c);
            // Underscores in the input are rejected outright; the C++ original tracked them
            // via wasNotUnderscore, but this port treats them as invalid field-mask input.
            if (c == '_')
            {
                throw InvalidProtocolBufferException.OnThrowMessage($"Invalid field mask: {text}");
            }
            wasNotUnderscore = true;
            wasNotCap = true;
        }
    }
    return (builder.ToString());
}
// Well-known types end up in a property called "value" in the JSON. As there's no longer a @type property
// in the given JSON token stream, we should *only* have tokens of start-object, name("value"), the value
// itself, and then end-object.
/// <summary>
/// Merges the replayed body of an <c>Any</c> whose packed message is itself a well-known type,
/// expecting exactly a "value" property wrapping the payload.
/// </summary>
private void MergeWellKnownTypeAnyBody(IMessage body, JsonTokenizer tokenizer)
{
    // Definitely start-object; checked in previous method.
    tokenizer.Next();
    var nameToken = tokenizer.Next();
    // TODO: What about an absent Int32Value, for example?
    bool isValueProperty = nameToken.Type == JsonToken.TokenType.Name &&
        nameToken.StringValue == JsonFormatter.AnyWellKnownTypeValueField;
    if (!isValueProperty)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Expected '{JsonFormatter.AnyWellKnownTypeValueField}' property for well-known type Any body");
    }
    Merge(body, tokenizer);
    var closeToken = tokenizer.Next();
    if (closeToken.Type != JsonToken.TokenType.EndObject)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Expected end-object token after @type/value for well-known type");
    }
}
/// <summary>
/// Merges a JSON string of the form <c>[-]seconds[.subseconds]s</c> into a well-known
/// <c>Duration</c> message, validating the format, prohibiting insignificant leading zeroes,
/// and checking the result is a normalized duration.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The value is not a string, or is not a valid Duration.</exception>
private static void MergeDuration(IMessage message, JsonToken token)
{
    if (token.Type != JsonToken.TokenType.StringValue)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected string value for Duration");
    }
    var match = DurationRegex.Match(token.StringValue);
    if (!match.Success)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Invalid Duration value: " + token.StringValue);
    }
    var sign = match.Groups["sign"].Value;
    var secondsText = match.Groups["int"].Value;
    // Prohibit leading insignficant zeroes
    if (secondsText[0] == '0' && secondsText.Length > 1)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Invalid Duration value: " + token.StringValue);
    }
    var subseconds = match.Groups["subseconds"].Value;
    var multiplier = sign == "-" ? -1 : 1;

    try
    {
        long seconds = long.Parse(secondsText, CultureInfo.InvariantCulture) * multiplier;
        int nanos = 0;
        if (subseconds != "")
        {
            // This should always work, as we've got 1-9 digits.
            // Fix: parse with the invariant culture, consistent with the seconds parse above (CA1305) —
            // the digits are machine-generated, never localized.
            int parsedFraction = int.Parse(subseconds.Substring(1), CultureInfo.InvariantCulture);
            // Scale the 1-9 digit fraction up to nanoseconds.
            nanos = parsedFraction * SubsecondScalingFactors[subseconds.Length] * multiplier;
        }
        if (!Duration.IsNormalized(seconds, nanos))
        {
            throw InvalidProtocolBufferException.OnThrowMessage($"Invalid Duration value: {token.StringValue}");
        }
        message.Descriptor.Fields[Duration.SecondsFieldNumber].Accessor.SetValue(message, seconds);
        message.Descriptor.Fields[Duration.NanosFieldNumber].Accessor.SetValue(message, nanos);
    }
    catch (FormatException)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Invalid Duration value: {token.StringValue}");
    }
}
/// <summary>
/// Converts a JSON object property name into a map key of the type declared by the
/// map entry's key field. Only bool, string and integral key types are valid.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The text is not valid for the key type, or the type cannot be a map key.</exception>
private static object ParseMapKey(FieldDescriptor field, string keyText)
{
    switch (field.FieldType)
    {
        case FieldType.Bool:
            // JSON map keys are always strings, so bool keys arrive as "true"/"false".
            switch (keyText)
            {
                case "true":
                    return true;
                case "false":
                    return false;
                default:
                    throw InvalidProtocolBufferException.OnThrowMessage("Invalid string for bool map key: " + keyText);
            }
        case FieldType.String:
            return keyText;
        case FieldType.Int32:
        case FieldType.SInt32:
        case FieldType.SFixed32:
            return ParseNumericString(keyText, int.Parse);
        case FieldType.UInt32:
        case FieldType.Fixed32:
            return ParseNumericString(keyText, uint.Parse);
        case FieldType.Int64:
        case FieldType.SInt64:
        case FieldType.SFixed64:
            return ParseNumericString(keyText, long.Parse);
        case FieldType.UInt64:
        case FieldType.Fixed64:
            return ParseNumericString(keyText, ulong.Parse);
        default:
            throw InvalidProtocolBufferException.OnThrowMessage("Invalid field type for map: " + field.FieldType);
    }
}
/// <summary>
/// Skips a group: consumes fields until the end-group tag matching
/// <paramref name="startGroupTag"/>, enforcing the recursion limit for nested groups.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">Recursion limit exceeded, stream truncated,
/// or the end-group field number does not match the start-group field number.</exception>
internal void SkipGroup(uint startGroupTag)
{
    // Note: Currently we expect this to be the way that groups are read. We could put the recursion
    // depth changes into the ReadTag method instead, potentially...
    recursionDepth++;
    if (recursionDepth >= recursionLimit)
    {
        throw InvalidProtocolBufferException.RecursionLimitExceeded();
    }
    uint endGroupTag;
    while (true)
    {
        endGroupTag = ReadTag();
        if (endGroupTag == 0)
        {
            throw InvalidProtocolBufferException.TruncatedMessage();
        }
        // End-group terminates the loop; SkipLastField would throw for it, so check here instead.
        if (WireFormat.GetTagWireType(endGroupTag) == WireFormat.WireType.EndGroup)
        {
            break;
        }
        // Nested groups are handled by recursing through SkipLastField -> SkipGroup.
        SkipLastField();
    }
    // The start-group and end-group tags must carry the same field number.
    int startField = WireFormat.GetTagFieldNumber(startGroupTag);
    int endField = WireFormat.GetTagFieldNumber(endGroupTag);
    if (startField != endField)
    {
        throw InvalidProtocolBufferException.OnThrowMessage(
            $"Mismatched end-group tag. Started with field {startField}; ended with field {endField}");
    }
    recursionDepth--;
}
/// <summary>
/// Parses a JSON object into a map field: each property name becomes a key of the map
/// entry's key type, and each property value a value of the entry's value type.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The value is not an object, the map
/// descriptor is malformed, or a map value is null.</exception>
private void MergeMapField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer)
{
    // Map fields are always objects, even if the values are well-known types: ParseSingleValue handles those.
    var startToken = tokenizer.Next();
    if (startToken.Type != JsonToken.TokenType.StartObject)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected an object to populate a map");
    }

    // A map field is modeled as a repeated message with field 1 = key, field 2 = value.
    var entryType = field.MessageType;
    var keyField = entryType.FindFieldByNumber(1);
    var valueField = entryType.FindFieldByNumber(2);
    if (keyField == null || valueField == null)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Invalid map field: " + field.FullName);
    }
    IDictionary dictionary = (IDictionary)field.Accessor.GetValue(message);

    for (;;)
    {
        var entryToken = tokenizer.Next();
        if (entryToken.Type == JsonToken.TokenType.EndObject)
        {
            return;
        }
        object key = ParseMapKey(keyField, entryToken.StringValue);
        object value = ParseSingleValue(valueField, tokenizer);
        if (value == null)
        {
            throw InvalidProtocolBufferException.OnThrowMessage("Map values must not be null");
        }
        // Later duplicate keys overwrite earlier ones.
        dictionary[key] = value;
    }
}
/// <summary>
/// Merges an RFC 3339 JSON string into a well-known <c>Timestamp</c> message,
/// handling fractional seconds and UTC offsets, and validating the result stays
/// within the representable timestamp range.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">The value is not a string, or is not a valid Timestamp.</exception>
private static void MergeTimestamp(IMessage message, JsonToken token)
{
    if (token.Type != JsonToken.TokenType.StringValue)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected string value for Timestamp");
    }
    var match = TimestampRegex.Match(token.StringValue);
    if (!match.Success)
    {
        throw InvalidProtocolBufferException.OnThrowMessage($"Invalid Timestamp value: {token.StringValue}");
    }
    var dateTime = match.Groups["datetime"].Value;
    var subseconds = match.Groups["subseconds"].Value;
    var offset = match.Groups["offset"].Value;

    try
    {
        DateTime parsed = DateTime.ParseExact(
            dateTime,
            "yyyy-MM-dd'T'HH:mm:ss",
            CultureInfo.InvariantCulture,
            DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
        // TODO: It would be nice not to have to create all these objects... easy to optimize later though.
        Timestamp timestamp = Timestamp.FromDateTime(parsed);
        int nanosToAdd = 0;
        if (subseconds != "")
        {
            // This should always work, as we've got 1-9 digits.
            int parsedFraction = int.Parse(subseconds.Substring(1), CultureInfo.InvariantCulture);
            nanosToAdd = parsedFraction * SubsecondScalingFactors[subseconds.Length];
        }
        int secondsToAdd = 0;
        if (offset != "Z")
        {
            // This is the amount we need to *subtract* from the local time to get to UTC - hence - => +1 and vice versa.
            int sign = offset[0] == '-' ? 1 : -1;
            int hours = int.Parse(offset.Substring(1, 2), CultureInfo.InvariantCulture);
            // Fix: parse minutes with the invariant culture as well, consistent with the hours
            // parse above (CA1305) — offset digits are machine-generated, never localized.
            int minutes = int.Parse(offset.Substring(4, 2), CultureInfo.InvariantCulture);
            int totalMinutes = hours * 60 + minutes;
            if (totalMinutes > 18 * 60)
            {
                throw InvalidProtocolBufferException.OnThrowMessage("Invalid Timestamp value: " + token.StringValue);
            }
            if (totalMinutes == 0 && sign == 1)
            {
                // This is an offset of -00:00, which means "unknown local offset". It makes no sense for a timestamp.
                throw InvalidProtocolBufferException.OnThrowMessage("Invalid Timestamp value: " + token.StringValue);
            }
            // We need to *subtract* the offset from local time to get UTC.
            secondsToAdd = sign * totalMinutes * 60;
        }
        // Ensure we've got the right signs. Currently unnecessary, but easy to do.
        if (secondsToAdd < 0 && nanosToAdd > 0)
        {
            secondsToAdd++;
            nanosToAdd = nanosToAdd - Duration.NanosecondsPerSecond;
        }
        if (secondsToAdd != 0 || nanosToAdd != 0)
        {
            timestamp += new Duration { Nanos = nanosToAdd, Seconds = secondsToAdd };
            // The resulting timestamp after offset change would be out of our expected range. Currently the Timestamp message doesn't validate this
            // anywhere, but we shouldn't parse it.
            if (timestamp.Seconds < Timestamp.UnixSecondsAtBclMinValue || timestamp.Seconds > Timestamp.UnixSecondsAtBclMaxValue)
            {
                throw InvalidProtocolBufferException.OnThrowMessage("Invalid Timestamp value: " + token.StringValue);
            }
        }
        message.Descriptor.Fields[Timestamp.SecondsFieldNumber].Accessor.SetValue(message, timestamp.Seconds);
        message.Descriptor.Fields[Timestamp.NanosFieldNumber].Accessor.SetValue(message, timestamp.Nanos);
    }
    catch (FormatException)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Invalid Timestamp value: " + token.StringValue);
    }
}
/// <summary>
/// Parses a single JSON value for the given field: handles null (returning <c>Value.ForNull()</c>
/// only for google.protobuf.Value fields), unwraps wrapper types to their inner value field,
/// recurses for message fields, and converts scalar tokens to CLR values.
/// </summary>
private object ParseSingleValue(FieldDescriptor field, JsonTokenizer tokenizer)
{
    var token = tokenizer.Next();
    if (token.Type == JsonToken.TokenType.Null)
    {
        // TODO: In order to support dynamic messages, we should really build this up
        // dynamically.
        if (IsGoogleProtobufValueField(field))
        {
            return (Value.ForNull());
        }
        // All other field types map JSON null to a CLR null, which callers treat as "no value".
        return (null);
    }

    var fieldType = field.FieldType;
    if (fieldType == FieldType.Message)
    {
        // Parse wrapper types as their constituent types.
        // TODO: What does this mean for null?
        if (field.MessageType.IsWrapperType)
        {
            // Rebind to the wrapper's inner "value" field so the scalar handling below applies.
            field = field.MessageType.Fields[WrappersReflection.WrapperValueFieldNumber];
            fieldType = field.FieldType;
        }
        else
        {
            // TODO: Merge the current value in message? (Public API currently doesn't make this relevant as we don't expose merging.)
            tokenizer.PushBack(token);
            IMessage subMessage = NewMessageForField(field);
            Merge(subMessage, tokenizer);
            return (subMessage);
        }
    }

    switch (token.Type)
    {
        case JsonToken.TokenType.True:
        case JsonToken.TokenType.False:
            if (fieldType == FieldType.Bool)
            {
                return (token.Type == JsonToken.TokenType.True);
            }
            // Fall through to "we don't support this type for this case"; could duplicate the behaviour of the default
            // case instead, but this way we'd only need to change one place.
            goto default;
        case JsonToken.TokenType.StringValue:
            return (ParseSingleStringValue(field, token.StringValue));
        // Note: not passing the number value itself here, as we may end up storing the string value in the token too.
        case JsonToken.TokenType.Number:
            return (ParseSingleNumberValue(field, token));
        case JsonToken.TokenType.Null:
            // NOTE(review): this case appears unreachable — Null tokens return from the block at the
            // top of the method before control reaches this switch.
            throw new NotImplementedException("Haven't worked out what to do for null yet");
        default:
            throw InvalidProtocolBufferException.OnThrowMessage("Unsupported JSON token type " + token.Type + " for field type " + fieldType);
    }
}
/// <summary>
/// Merges a JSON object into a well-known <c>Any</c> message. The token stream is recorded
/// until the <c>@type</c> property is found (which may appear anywhere in the object), then
/// replayed — minus <c>@type</c> — against a message of the type named by the type URL, and
/// the serialized result is packed into the Any's type_url/value fields.
/// </summary>
private void MergeAny(IMessage message, JsonTokenizer tokenizer)
{
    // Record the token stream until we see the @type property. At that point, we can take the value, consult
    // the type registry for the relevant message, and replay the stream, omitting the @type property.
    var tokens = new List <JsonToken>();

    var token = tokenizer.Next();
    if (token.Type != JsonToken.TokenType.StartObject)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected object value for Any");
    }
    int typeUrlObjectDepth = tokenizer.ObjectDepth;

    // The check for the property depth protects us from nested Any values which occur before the type URL
    // for *this* Any.
    while (token.Type != JsonToken.TokenType.Name ||
           token.StringValue != JsonFormatter.AnyTypeUrlField ||
           tokenizer.ObjectDepth != typeUrlObjectDepth)
    {
        tokens.Add(token);
        token = tokenizer.Next();

        // Depth dropping below the Any's own object depth means we reached end-object without @type.
        if (tokenizer.ObjectDepth < typeUrlObjectDepth)
        {
            throw InvalidProtocolBufferException.OnThrowMessage("Any message with no @type");
        }
    }

    // Don't add the @type property or its value to the recorded token list
    token = tokenizer.Next();
    if (token.Type != JsonToken.TokenType.StringValue)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected string value for Any.@type");
    }
    string typeUrl = token.StringValue;
    string typeName = Any.GetTypeName(typeUrl);

    MessageDescriptor descriptor = settings.TypeRegistry.Find(typeName);
    if (descriptor == null)
    {
        throw new InvalidOperationException($"Type registry has no descriptor for type name '{typeName}'");
    }

    // Now replay the token stream we've already read and anything that remains of the object, just parsing it
    // as normal. Our original tokenizer should end up at the end of the object.
    var replay = JsonTokenizer.FromReplayedTokens(tokens, tokenizer);
    var body = descriptor.Parser.CreateTemplate();
    if (descriptor.IsWellKnownType)
    {
        // Well-known types wrap their payload in a "value" property; handled separately.
        MergeWellKnownTypeAnyBody(body, replay);
    }
    else
    {
        Merge(body, replay);
    }
    var data = body.ToByteString();

    // Now that we have the message data, we can pack it into an Any (the message received as a parameter).
    message.Descriptor.Fields[Any.TypeUrlFieldNumber].Accessor.SetValue(message, typeUrl);
    message.Descriptor.Fields[Any.ValueFieldNumber].Accessor.SetValue(message, data);
}
/// <summary>
/// Merges the given message using data from the given tokenizer. In most cases, the next
/// token should be a "start object" token, but wrapper types and nullity can invalidate
/// that assumption. This is implemented as an LL(1) recursive descent parser over the stream
/// of tokens provided by the tokenizer. This token stream is assumed to be valid JSON, with the
/// tokenizer performing that validation - but not every token stream is valid "protobuf JSON".
/// </summary>
private void Merge(IMessage message, JsonTokenizer tokenizer)
{
    if (tokenizer.ObjectDepth > settings.RecursionLimit)
    {
        throw InvalidProtocolBufferException.JsonRecursionLimitExceeded();
    }
    if (message.Descriptor.IsWellKnownType)
    {
        // Well-known types with registered handlers get special treatment; all others
        // fall through to the normal object parsing below.
        Action <JsonParser, IMessage, JsonTokenizer> handler;
        if (WellKnownTypeHandlers.TryGetValue(message.Descriptor.FullName, out handler))
        {
            handler(this, message, tokenizer);
            return;
        }
    }
    var token = tokenizer.Next();
    if (token.Type != JsonToken.TokenType.StartObject)
    {
        throw InvalidProtocolBufferException.OnThrowMessage("Expected an object");
    }
    var descriptor = message.Descriptor;
    var jsonFieldMap = descriptor.Fields.ByJsonName();
    // All the oneof fields we've already accounted for - we can only see each of them once.
    // The set is created lazily to avoid the overhead of creating a set for every message
    // we parsed, when oneofs are relatively rare.
    HashSet <OneofDescriptor> seenOneofs = null;
    for (;;)
    {
        token = tokenizer.Next();
        if (token.Type == JsonToken.TokenType.EndObject)
        {
            return;
        }
        if (token.Type != JsonToken.TokenType.Name)
        {
            throw new InvalidOperationException("Unexpected token type " + token.Type);
        }
        string name = token.StringValue;
        FieldDescriptor field;
        if (!jsonFieldMap.TryGetValue(name, out field))
        {
            // TODO: Is this what we want to do? If not, we'll need to skip the value,
            // which may be an object or array. (We might want to put code in the tokenizer
            // to do that.)
            throw InvalidProtocolBufferException.OnThrowMessage("Unknown field: " + name);
        }
        if (field.ContainingOneof != null)
        {
            seenOneofs = seenOneofs ?? new HashSet <OneofDescriptor>();
            if (!seenOneofs.Add(field.ContainingOneof))
            {
                throw InvalidProtocolBufferException.OnThrowMessage($"Multiple values specified for oneof {field.ContainingOneof.Name}");
            }
        }
        MergeField(message, field, tokenizer);
    }
}
/// <summary>
/// Converts a JSON number token into the CLR value appropriate for the given field type.
/// Integer field types require the double to be finite and integral; out-of-range values
/// are reported via checked-conversion overflow. Floats get special handling for NaN and
/// infinities.
/// </summary>
private static object ParseSingleNumberValue(FieldDescriptor field, JsonToken token)
{
    double value = token.NumberValue;
    // checked: out-of-range double-to-integer casts throw OverflowException, caught below.
    checked
    {
        try
        {
            switch (field.FieldType)
            {
                case FieldType.Int32:
                case FieldType.SInt32:
                case FieldType.SFixed32:
                    CheckInteger(value);
                    return ((int)value);
                case FieldType.UInt32:
                case FieldType.Fixed32:
                    CheckInteger(value);
                    return ((uint)value);
                case FieldType.Int64:
                case FieldType.SInt64:
                case FieldType.SFixed64:
                    CheckInteger(value);
                    return ((long)value);
                case FieldType.UInt64:
                case FieldType.Fixed64:
                    CheckInteger(value);
                    return ((ulong)value);
                case FieldType.Double:
                    return (value);
                case FieldType.Float:
                    if (double.IsNaN(value))
                    {
                        return (float.NaN);
                    }
                    if (value > float.MaxValue || value < float.MinValue)
                    {
                        // Infinities narrow to float infinities; finite doubles beyond float range are errors.
                        if (double.IsPositiveInfinity(value))
                        {
                            return (float.PositiveInfinity);
                        }
                        if (double.IsNegativeInfinity(value))
                        {
                            return (float.NegativeInfinity);
                        }
                        throw InvalidProtocolBufferException.OnThrowMessage($"Value out of range: {value}");
                    }
                    return ((float)value);
                case FieldType.Enum:
                    CheckInteger(value);
                    // Just return it as an int, and let the CLR convert it.
                    // Note that we deliberately don't check that it's a known value.
                    return ((int)value);
                default:
                    throw InvalidProtocolBufferException.OnThrowMessage($"Unsupported conversion from JSON number for field type {field.FieldType}");
            }
        }
        catch (OverflowException)
        {
            throw InvalidProtocolBufferException.OnThrowMessage($"Value out of range: {value}");
        }
    }
}