private static IEnumerable<NodeType> StepNodes(IEnumerable<string> parts)
{
    using (var tokenizer = new JsonTokenizer())
    using (var parser = new NodeParser(tokenizer))
    {
        foreach (var json in parts)
        {
            // Tokenize a part of the input data.
            Write(tokenizer, json);

            // Read until we consume all input data.
            while (parser.TokenNextIndex < tokenizer.TokenNextIndex)
            {
                var node = parser.Step();

                if (node == NodeType.None)
                {
                    continue;
                }

                yield return node;
            }
        }

        // Flush the parser.
        while (parser.NodeType != NodeType.None)
        {
            yield return parser.Step();
        }
    }
}
private static IEnumerable<NodeType> StepNodes(string json)
{
    using (var tokenizer = new JsonTokenizer())
    using (var parser = new NodeParser(tokenizer))
    {
        // Tokenize the entire input data.
        Write(tokenizer, json);

        // Read until we have no more input.
        while (parser.TokenNextIndex < tokenizer.TokenNextIndex)
        {
            var node = parser.Step();

            if (node == NodeType.None)
            {
                continue;
            }

            yield return node;
        }

        // Flush the parser.
        while (parser.NodeType != NodeType.None)
        {
            yield return parser.Step();
        }
    }
}
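// Usage sketch (illustrative, not part of the original fixture): the chunked
// overload above lets the tokenizer and parser consume input incrementally,
// so the split point between parts can fall anywhere, even mid-value. The
// JSON literal and split point below are assumptions for demonstration only.
[Test]
public void StepNodes_ChunkedInput_ProducesNodes()
{
    // Feed the parser JSON in two arbitrary chunks and log each node
    // as it is produced.
    foreach (var node in StepNodes(new[] { "{\"foo\":123,", "\"bar\":[1,2,3]}" }))
    {
        Debug.Log(node);
    }
}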
public unsafe void PerformanceTest_NodeParser_Step_MockEntities(int count)
{
    var json = JsonTestData.GetMockEntities(count);

    // Tokenize the full input up front so only parsing is measured below.
    fixed (char* ptr = json)
    {
        m_Tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
    }

    Measure.Method(() =>
           {
               using (var parser = new NodeParser(m_Tokenizer))
               {
                   // Step until the parser reports no more nodes.
                   parser.Step(NodeType.None);
               }
           })
           .Definition("NodeParserStep")
           .WarmupCount(1)
           .MeasurementCount(100)
           .Run();

    PerformanceTest.Active.CalculateStatisticalValues();

    // Median is in milliseconds; report parsing throughput in MB/s.
    var size = json.Length / (double) 1024 / 1024;
    Debug.Log($"MB/s=[{size / (PerformanceTest.Active.SampleGroups.First().Median / 1000)}]");
}