Example #1
        static IEnumerable <NodeType> StepNodes(string json)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
                using (var parser = new NodeParser(tokenizer, Allocator.TempJob))
                {
                    // Tokenize the entire input data.
                    Write(tokenizer, json);

                    // Read until we have no more input.
                    while (parser.TokenNextIndex < tokenizer.TokenNextIndex)
                    {
                        var node = parser.Step();

                        if (node == NodeType.None)
                        {
                            continue;
                        }

                        yield return(node);
                    }

                    // Flush the parser.
                    while (parser.NodeType != NodeType.None)
                    {
                        yield return(parser.Step());
                    }
                }
        }
Example #2
        private async Task <IList <object> > ProduceJsonArrayAsync(JsonTokenizer tokenizer, int depth, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
        {
            TokenType lsq;

            if (TracingEnabled)
            {
                JsonToken t = NextAndGetToken(tokenizer, tokenTailBuffer, nodeTailBuffer); // pop the leading [.
                tokenTailBuffer.Push(t);
                lsq = t.Type;
            }
            else
            {
                lsq = NextAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
            }
            if (lsq != TokenType.LSQUARE)
            {
                // this cannot happen.
                throw new InvalidJsonTokenException("JSON array should start with [. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
            }

            IList <object> list = new List <object>();
            var            type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);

            if (type == TokenType.RSQUARE)
            {
                // empty array
                JsonToken t = tokenizer.Next(); // discard the trailing ].
                if (TracingEnabled)
                {
                    tokenTailBuffer.Push(t);
                }
            }
            else
            {
                IList <object> elements = await ProduceJsonArrayElementsAsync(tokenizer, depth, tokenTailBuffer, nodeTailBuffer);

                var rsq = NextAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer); // discard the trailing ].
                if (rsq == TokenType.RSQUARE)
                {
                    // Done
                    ((List <object>)list).AddRange(elements);
                }
                else
                {
                    // ???
                    throw new InvalidJsonTokenException("JSON array should end with ]. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
                }
            }
            IList <object> jArray = jsonTypeFactory.CreateArray(list);

            if (TracingEnabled)
            {
                nodeTailBuffer.Push(jArray);
            }

            //if (log.IsLoggable(Level.FINE)) {
            //    log.fine("jArray = " + jArray);
            //}
            return(jArray);
        }
Example #3
        static IEnumerable <NodeType> StepNodes(IEnumerable <string> parts)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
                using (var parser = new NodeParser(tokenizer, Allocator.TempJob))
                {
                    foreach (var json in parts)
                    {
                        // Tokenize a part of the input data.
                        Write(tokenizer, json);

                        // Read until we consume all input data.
                        while (parser.TokenNextIndex < tokenizer.TokenNextIndex)
                        {
                            var node = parser.Step();

                            if (node == NodeType.None)
                            {
                                continue;
                            }

                            yield return(node);
                        }
                    }

                    // Flush the parser.
                    while (parser.NodeType != NodeType.None)
                    {
                        yield return(parser.Step());
                    }
                }
        }
Example #4
        /// <summary>
        /// Deserializes the payload using the JsonParser, then converts the resulting CLR objects into payload element form.
        /// Throws a TaupoInvalidOperationException if the JSON does not evaluate successfully.
        /// </summary>
        /// <param name="serialized">A raw json payload</param>
        /// <param name="payloadContext">Additional payload information to aid deserialization</param>
        /// <returns>A PayloadElement representation of the payload</returns>
        public ODataPayloadElement DeserializeFromBinary(byte[] serialized, ODataPayloadContext payloadContext)
        {
            ExceptionUtilities.CheckArgumentNotNull(serialized, "serialized");
            ExceptionUtilities.CheckArgumentNotNull(payloadContext, "payloadContext");

            string encodingName = payloadContext.EncodingName;

            ODataPayloadElement errorPayload = null;

            if (this.payloadErrorDeserializer.TryDeserializeErrorPayload(serialized, encodingName, out errorPayload))
            {
                return(errorPayload);
            }

            // Evaluate the given JSON text
            Encoding encoding = HttpUtilities.GetEncodingOrDefault(encodingName);
            string   payload  = encoding.GetString(serialized, 0, serialized.Length);

            JsonValue jsonData = null;

            using (StringReader reader = new StringReader(payload))
            {
                JsonTokenizer tokenizer = new JsonTokenizer(reader);
                JsonParser    parser    = new JsonParser(tokenizer);
                jsonData = parser.ParseValue();
            }

            // convert the deserialized JsonValue objects into payload elements
            return(this.payloadConverter.ConvertToPayloadElement(jsonData));
        }
Example #5
        public unsafe void PerformanceTest_JsonTokenizer_WriteWithStandardValidation_MockEntities(int count, int initialTokenBuffer)
        {
            var json = JsonTestData.GetMockEntities(count);

            Measure.Method(() =>
            {
                fixed (char* ptr = json)
                {
                    using (var tokenizer = new JsonTokenizer(initialTokenBuffer, JsonValidationType.Standard)
                    {
                        AllowTokenBufferResize = true
                    })
                    {
                        tokenizer.Write(new UnsafeBuffer <char> {
                            Buffer = ptr, Length = json.Length
                        }, 0, json.Length);
                    }
                }
            })
            .Definition("JsonTokenizerWrite")
            .WarmupCount(1)
            .MeasurementCount(100)
            .Run();

            PerformanceTest.Active.CalculateStatisticalValues();

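            // Compute throughput in MB/s from the payload size and the median sample time reported by the performance framework.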
            var size = json.Length / (double)1024 / 1024;

            Debug.Log($"MB/s=[{size / (PerformanceTest.Active.SampleGroups.First().Median / 1000)}]");
        }
Example #6
        private static void TestTokenizedValue(byte[] data, TokenType tokenType, string tokenData)
        {
            var tokeniser = new JsonTokenizer(data);

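            // A single value should tokenize to exactly one token of the expected type, followed by the End token.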
            TestStep(tokeniser, tokenType, tokenData);
            TestStep(tokeniser, TokenType.End, "");
        }
Example #7
        /// <summary>
        /// Parses the specified text reader content into a <see cref="JsonValue"/>
        /// </summary>
        /// <param name="reader">The reader to read.</param>
        /// <returns>The <see cref="JsonValue"/> read from the reader.</returns>
        public static JsonValue ParseValue(TextReader reader)
        {
            var tokenizer = new JsonTokenizer(reader);
            var parser    = new JsonTextPreservingParser(tokenizer);

            return(parser.ParseValueOrProperty());
        }
Example #8
 private static dynamic ReadJson(HttpWebResponse response)
 {
     using (response)
     {
         return(JsonTokenizer.Parse(Read(response)));
     }
 }
Example #9
            public void Dispose()
            {
                _tokenizer.Dispose();
                _tokenizer = null!;

                Current = null!;
            }
Example #10
        private IList <object> ProduceJsonArrayElements(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
        {
            IList <object> elements = new List <object>();

            var type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);

            while (type != TokenType.RSQUARE)
            {
                object element = ProduceJsonArrayElement(tokenizer, tokenTailBuffer, nodeTailBuffer);
                if (element != null)
                {
                    elements.Add(element);
                }
                type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);

                // "consume" the comma.
                if (parserPolicy.AllowExtraCommas)
                {
                    while (type == TokenType.COMMA)
                    {
                        JsonToken t = tokenizer.Next();
                        if (TracingEnabled)
                        {
                            tokenTailBuffer.Push(t);
                        }
                        type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
                    }
                }
                else
                {
                    if (type == TokenType.COMMA)
                    {
                        JsonToken t = tokenizer.Next();
                        if (TracingEnabled)
                        {
                            tokenTailBuffer.Push(t);
                        }
                        type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);

                        if (parserPolicy.AllowTrailingComma)
                        {
                            // Continue.
                        }
                        else
                        {
                            // Invalid  char sequence: ",]"
                            if (type == TokenType.RSQUARE)
                            {
                                throw new InvalidJsonTokenException("Syntax error: Array has a trailing comma. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
                            }
                        }
                    }
                }
            }

            //if (log.isLoggable(Level.FINER)) {
            //    log.finer("elements = " + elements);
            //}
            return(elements);
        }
Example #11
            public override ODataPayloadElement Visit(PrimitiveValue payloadElement)
            {
                var rawText  = payloadElement.Annotations.OfType <RawTextPayloadElementRepresentationAnnotation>().Select(r => r.Text).SingleOrDefault();
                var dataType = payloadElement.Annotations.OfType <DataTypeAnnotation>().Select(d => d.DataType).OfType <PrimitiveDataType>().SingleOrDefault();

                if (rawText != null && dataType != null)
                {
                    using (var reader = new StringReader(rawText))
                    {
                        var tokenizer = new JsonTokenizer(reader);
                        var parsed    = tokenizer.Value;

                        if (tokenizer.TokenType == JsonTokenType.String)
                        {
                            var clrType = dataType.GetFacetValue <PrimitiveClrTypeFacet, Type>(null);
                            if (this.converter.TryConvertFromString((string)parsed, clrType, out parsed))
                            {
                                return(payloadElement.ReplaceWith(new PrimitiveValue(payloadElement.FullTypeName, parsed)));
                            }
                        }
                    }
                }

                return(base.Visit(payloadElement));
            }
Example #12
 static void CheckEndOfExpression(JsonTokenizer tokenizer)
 {
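     // Any token left after the expression has been fully parsed is unexpected input.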
     if (tokenizer.TryReadNext())
     {
         ThrowUnexpected(tokenizer);
     }
 }
Example #13
        private string ProduceJsonString(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
        {
            string jString = null;

            try {
                JsonToken t = tokenizer.Next();
                // log.warning(">>>>>>>>>>>>>>>>>>>>>>>>>>>>> t = " + t);
                if (TracingEnabled)
                {
                    tokenTailBuffer.Push(t);
                }
                string value = (string)t.Value;
                // log.warning(">>>>>>>>>>>>>>>>>>>>>>>>>>>>> value = " + value);
                jString = (string)jsonTypeFactory.CreateString(value);
                // log.warning(">>>>>>>>>>>>>>>>>>>>>>>>>>>>> jString = " + jString);
            } catch (Exception e) {
                throw new UnknownParserException("Failed to create a String node. " + tokenTailBuffer.ToTraceString(), e, GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
            }
            if (TracingEnabled)
            {
                // ???
                nodeTailBuffer.Push(jString);
            }
            return(jString);
        }
Example #14
        JsonNode ParseArray(JsonTokenizer tokenizer)
        {
            var res = new JsonNode();

            res.MakeArray();
            bool needSep = false;

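            // Read tokens until the closing ']' appears; after the first element,
            // a ',' separator is required before each subsequent element.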
            while (true)
            {
                tokenizer.ReadNext();

                if (tokenizer.SpecialChar == ']')
                {
                    return(res);
                }

                if (needSep)
                {
                    if (tokenizer.SpecialChar != ',')
                    {
                        throw new JsonException("Separator expected");
                    }

                    tokenizer.ReadNext();
                }

                JsonNode value = ParseValue(tokenizer);

                res.m_Array.Add(value);
                value.AssignParent(res);

                needSep = true;
            }
        }
Example #15
        public static JsonData Parse(string json)
        {
            _buffer ??= new StringBuilder(200);

            using var tokenizer = new JsonTokenizer(json, _buffer);
            return(Parse(tokenizer));
        }
Example #16
        public static JsonData Parse(Stream stream, Encoding? encoding = null)
        {
            _buffer ??= new StringBuilder(200);

            using var tokenizer = new JsonTokenizer(new JsonReader(stream, encoding), _buffer);
            return(Parse(tokenizer));
        }
Example #17
        public TElement[] Deserialize(ref JsonTokenizer tokenizer)
        {
            var buffer = new LocalList <TElement>();

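            // Walk the token stream: a null token means the array itself is null,
            // the opening bracket is skipped, the closing bracket ends the loop,
            // and every other token is handed to the element converter.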
            while (tokenizer.MoveNext())
            {
                var token     = tokenizer.Current;
                var tokenType = token.TokenType;

                // ReSharper disable once ConvertIfStatementToSwitchStatement
                if (tokenType == JsonTokenType.Null)
                {
                    return(null);
                }
                if (tokenType == JsonTokenType.ArrayStart)
                {
                    continue;
                }
                if (tokenType == JsonTokenType.ArrayEnd)
                {
                    break;
                }

                var element = _elementConverter.Deserialize(ref tokenizer);
                buffer.Add(element);
            }

            return(buffer.ToArray());
        }
Example #18
        public void ParsingComplexJsonObject_Works()
        {
            // arrange
            string        json = File.ReadAllText("complexJsonObject.json");
            JsonTokenizer sut  = new JsonTokenizer();

            // act & assert

            // root object
            AssertTokenType(json, sut, JsonTokenTypeEnum.StartObject);

            AssertPropertyName(json, sut, "Name");
            AssertPropertyValue(json, sut, "Foo");

            AssertPropertyName(json, sut, "Age");
            AssertPropertyValue(json, sut, "123");

            AssertPropertyName(json, sut, "IsHuman");
            AssertPropertyValue(json, sut, "false");

            // tags array
            AssertPropertyName(json, sut, "Tags");
            AssertTokenType(json, sut, JsonTokenTypeEnum.StartArray);
            AssertPropertyName(json, sut, "robot");
            AssertPropertyName(json, sut, "ai");
            AssertPropertyName(json, sut, "replicator");
            AssertPropertyName(json, sut, "42");
            AssertTokenType(json, sut, JsonTokenTypeEnum.EndArray);

            // inventory object
            AssertPropertyName(json, sut, "Inventory");
            AssertTokenType(json, sut, JsonTokenTypeEnum.StartObject);
            AssertPropertyName(json, sut, "Slots");
            AssertPropertyValue(json, sut, "10");

            AssertPropertyName(json, sut, "Weight");
            AssertPropertyValue(json, sut, "1.7");

            // items array
            AssertPropertyName(json, sut, "Items");
            AssertTokenType(json, sut, JsonTokenTypeEnum.StartArray);

            // inventory item
            AssertTokenType(json, sut, JsonTokenTypeEnum.StartObject);
            AssertPropertyName(json, sut, "Name");
            AssertPropertyValue(json, sut, "Burner");
            AssertTokenType(json, sut, JsonTokenTypeEnum.EndObject);

            // inventory item
            AssertTokenType(json, sut, JsonTokenTypeEnum.StartObject);
            AssertPropertyName(json, sut, "Name");
            AssertPropertyValue(json, sut, "Lightsaber");
            AssertTokenType(json, sut, JsonTokenTypeEnum.EndObject);

            AssertTokenType(json, sut, JsonTokenTypeEnum.EndArray);
            AssertTokenType(json, sut, JsonTokenTypeEnum.EndObject);

            AssertTokenType(json, sut, JsonTokenTypeEnum.EndObject);
        }
Example #19
 protected internal virtual void DisableTracing(JsonTokenizer tokenizer)
 {
     tracingEnabled = false;
     if (tokenizer != null && tokenizer is TraceableJsonTokenizer)
     {
         ((TraceableJsonTokenizer)tokenizer).DisableTracing();
     }
 }
Example #20
 public virtual void DisableLookAheadParsing(JsonTokenizer tokenizer)
 {
     this.lookAheadParsing = false;
     if (tokenizer != null && tokenizer is LookAheadJsonTokenizer)
     {
         ((LookAheadJsonTokenizer)tokenizer).DisableLookAheadParsing();
     }
 }
Example #21
        public IEnumerable <TEntity> GetEntities(IEntitySourceContext <TEntity> context)
        {
            var tokenizer = new JsonTokenizer(GetReader());

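            // Consume the opening array-start token so the enumerator begins at the first entity element.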
            tokenizer.Skip(JsonTokenType.ArrayStart);

            return(new Enumerator(_converters, _descriptions, tokenizer));
        }
Example #22
        public void NextAfterEndDocumentThrows()
        {
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null"));

            Assert.Equal(JsonToken.Null, tokenizer.Next());
            Assert.Equal(JsonToken.EndDocument, tokenizer.Next());
            Assert.Throws <InvalidOperationException>(() => tokenizer.Next());
        }
Example #23
 public virtual void EnableLookAheadParsing(JsonTokenizer tokenizer)
 {
     this.lookAheadParsing = true;
     if (tokenizer != null && tokenizer is LookAheadJsonTokenizer)
     {
         ((LookAheadJsonTokenizer)tokenizer).EnableLookAheadParsing();
     }
 }
Example #24
        protected TElement DeserializeElement(JsonTokenizer tokenizer)
        {
            var token     = tokenizer.Current;
            var tokenType = token.TokenType;

            return(tokenType != JsonTokenType.Null
                ? _elementConverter.Deserialize(tokenizer)
                : default!);
        }
Example #25
        public static JsonData VisitProperty(JsonTokenizer tokenizer, out string propertyName)
        {
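            // Capture the property name from the current token, advance past it,
            // and let Visit read the property's value.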
            var property = tokenizer.Current.GetNotNullPropertyName();

            tokenizer.MoveNext();

            propertyName = property!;
            return(Visit(tokenizer));
        }
Example #26
        static void ThrowUnexpected(JsonTokenizer tokenizer)
        {
            if (tokenizer.HasSpecialChar)
            {
                throw new JsonException("Unexpected special character: " + tokenizer.SpecialChar.ToString());
            }

            throw new JsonException((tokenizer.TokenIsString ? "Unexpected string: " : "Unexpected token: ") + tokenizer.Token);
        }
Example #27
        public void SkipValue(string json)
        {
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json.Replace('\'', '"')));

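            // After the "skip" property name, SkipValue should jump over the whole value
            // so the next string token is the "next" property name.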
            Assert.Equal(JsonToken.StartObject, tokenizer.Next());
            Assert.Equal("skip", tokenizer.Next().StringValue);
            tokenizer.SkipValue();
            Assert.Equal("next", tokenizer.Next().StringValue);
        }
Example #28
        private JsonToken NextAndGetToken(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
        {
            JsonToken s = tokenizer.Next();

            if (s == null)
            {
                throw new UnknownParserException("Failed to get the next json token. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
            }
            return(s);
        }
Example #29
 static void Write(JsonTokenizer tokenizer, string json)
 {
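     // Pin the managed string so its characters can be passed to the tokenizer as an unmanaged buffer.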
     unsafe
     {
         fixed (char* ptr = json)
         {
             tokenizer.Write(new UnsafeBuffer <char>(ptr, json.Length), 0, json.Length);
         }
     }
 }
Example #30
        public TNullable? Deserialize(ref JsonTokenizer tokenizer)
        {
            var token = tokenizer.Current;

            if (token.TokenType == JsonTokenType.Null)
            {
                return(null);
            }
            return(_valueConverter.Deserialize(ref tokenizer));
        }