/// <summary>
/// Measures <c>NodeParser.Step</c> throughput over pre-tokenized mock entity JSON.
/// </summary>
/// <param name="count">Number of mock entities to generate as input. NOTE(review): presumably the JSON size scales with this — confirm against JsonTestData.</param>
public unsafe void PerformanceTest_NodeParser_Step_MockEntities(int count)
{
    var json = JsonTestData.GetMockEntities(count);

    // Tokenize once up front; the measured body only walks the token stream.
    fixed (char* chars = json)
    {
        m_Tokenizer.Write(new UnsafeBuffer<char> { Buffer = chars, Length = json.Length }, 0, json.Length);
    }

    Measure.Method(() =>
           {
               using (var parser = new NodeParser(m_Tokenizer))
               {
                   parser.Step(NodeType.None);
               }
           })
           .Definition("NodeParserStep")
           .WarmupCount(1)
           .MeasurementCount(100)
           .Run();

    PerformanceTest.Active.CalculateStatisticalValues();

    // Report throughput. NOTE(review): assumes Median is in milliseconds and
    // treats json.Length (chars) as bytes — confirm intended units.
    var megabytes = json.Length / (double) 1024 / 1024;
    var medianSeconds = PerformanceTest.Active.SampleGroups.First().Median / 1000;
    Debug.Log($"MB/s=[{megabytes / medianSeconds}]");
}
/// <summary>
/// Measures <c>PackedBinaryWriter.Write</c> throughput over pre-tokenized mock entity JSON.
/// </summary>
/// <param name="count">Number of mock entities to generate as input.</param>
public unsafe void PerformanceTest_PackedBinaryWriter_Write_MockEntities(int count)
{
    var json = JsonTestData.GetMockEntities(count);

    // Tokenize once before measurement; each measured iteration re-pins the
    // source string and writes the full token range into a fresh stream.
    fixed (char* chars = json)
    {
        m_Tokenizer.Write(new UnsafeBuffer<char> { Buffer = chars, Length = json.Length }, 0, json.Length);
    }

    Measure.Method(() =>
           {
               using (var stream = new PackedBinaryStream(Allocator.TempJob))
               using (var writer = new PackedBinaryWriter(stream, m_Tokenizer))
               {
                   fixed (char* chars = json)
                   {
                       writer.Write(new UnsafeBuffer<char> { Buffer = chars, Length = json.Length }, m_Tokenizer.TokenNextIndex);
                   }
               }
           })
           .Definition("PackedBinaryWriterWrite")
           .WarmupCount(1)
           .MeasurementCount(100)
           .Run();

    PerformanceTest.Active.CalculateStatisticalValues();

    // Report throughput. NOTE(review): assumes Median is in milliseconds and
    // treats json.Length (chars) as bytes — confirm intended units.
    var megabytes = json.Length / (double) 1024 / 1024;
    var medianSeconds = PerformanceTest.Active.SampleGroups.First().Median / 1000;
    Debug.Log($"MB/s=[{megabytes / medianSeconds}]");
}
/// <summary>
/// Measures <c>JsonTokenizer.Write</c> throughput with standard validation enabled.
/// </summary>
/// <param name="count">Number of mock entities to generate as input.</param>
/// <param name="initialTokenBuffer">Initial token buffer capacity; the tokenizer is allowed to grow it.</param>
public unsafe void PerformanceTest_JsonTokenizer_WriteWithStandardValidation_MockEntities(int count, int initialTokenBuffer)
{
    var json = JsonTestData.GetMockEntities(count);

    // Each measured iteration constructs, uses, and disposes a fresh tokenizer,
    // so tokenizer allocation is part of the measured cost.
    Measure.Method(() =>
           {
               fixed (char* chars = json)
               {
                   using (var tokenizer = new JsonTokenizer(initialTokenBuffer, JsonValidationType.Standard) { AllowTokenBufferResize = true })
                   {
                       tokenizer.Write(new UnsafeBuffer<char> { Buffer = chars, Length = json.Length }, 0, json.Length);
                   }
               }
           })
           .Definition("JsonTokenizerWrite")
           .WarmupCount(1)
           .MeasurementCount(100)
           .Run();

    PerformanceTest.Active.CalculateStatisticalValues();

    // Report throughput. NOTE(review): assumes Median is in milliseconds and
    // treats json.Length (chars) as bytes — confirm intended units.
    var megabytes = json.Length / (double) 1024 / 1024;
    var medianSeconds = PerformanceTest.Active.SampleGroups.First().Median / 1000;
    Debug.Log($"MB/s=[{megabytes / medianSeconds}]");
}
/// <summary>
/// Measures <c>JsonStandardValidator.Validate</c> throughput over mock entity JSON.
/// </summary>
/// <param name="count">Number of mock entities to generate as input.</param>
public unsafe void PerformanceTest_JsonStandardValidator_Validate_MockEntities(int count)
{
    var json = JsonTestData.GetMockEntities(count);

    // Each measured iteration constructs, uses, and disposes a fresh validator,
    // so validator allocation is part of the measured cost.
    Measure.Method(() =>
           {
               fixed (char* chars = json)
               {
                   using (var validator = new JsonStandardValidator())
                   {
                       validator.Validate(new UnsafeBuffer<char> { Buffer = chars, Length = json.Length }, 0, json.Length);
                   }
               }
           })
           .Definition("JsonStandardValidatorValidate")
           .WarmupCount(1)
           .MeasurementCount(100)
           .Run();

    PerformanceTest.Active.CalculateStatisticalValues();

    // Report throughput. NOTE(review): assumes Median is in milliseconds and
    // treats json.Length (chars) as bytes — confirm intended units.
    var megabytes = json.Length / (double) 1024 / 1024;
    var medianSeconds = PerformanceTest.Active.SampleGroups.First().Median / 1000;
    Debug.Log($"MB/s=[{megabytes / medianSeconds}]");
}
/// <summary>
/// Measures <c>SerializedObjectReader</c> end-to-end read throughput against a JSON
/// file on disk, reading array elements in batches.
/// </summary>
/// <param name="count">Number of mock entities to write to the test file.</param>
/// <param name="batchSize">Number of <c>SerializedValueView</c> elements read per batch; also used as the reader's node buffer size.</param>
public unsafe void PerformanceTest_SerializedObjectReader_Read_MockEntities(int count, int batchSize)
{
    // Fix: the "test.json" path was repeated four times (write/open/stat/delete);
    // a single constant keeps them from drifting apart.
    const string path = "test.json";

    File.WriteAllText(path, JsonTestData.GetMockEntities(count));

    try
    {
        Measure.Method(() =>
               {
                   var views = stackalloc SerializedValueView[batchSize];

                   // Configure for raw throughput: no validation, async reads,
                   // large block/output buffers.
                   var config = SerializedObjectReaderConfiguration.Default;
                   config.BlockBufferSize = 512 << 10;
                   config.NodeBufferSize = batchSize;
                   config.ValidationType = JsonValidationType.None;
                   config.UseReadAsync = true;
                   config.OutputBufferSize = 4096 << 10;

                   using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, config.BlockBufferSize, FileOptions.Asynchronous))
                   using (var reader = new SerializedObjectReader(stream, config))
                   {
                       // Step into the root, drain the array in batches, then step out.
                       reader.Step();

                       while (reader.ReadArrayElementBatch(views, batchSize) != 0)
                       {
                           reader.DiscardCompleted();
                       }

                       reader.Step();
                   }
               })
               .Definition("SerializedObjectReaderRead")
               .WarmupCount(1)
               .MeasurementCount(100)
               .Run();

        PerformanceTest.Active.CalculateStatisticalValues();

        // Report throughput from the on-disk file size.
        // NOTE(review): assumes Median is in milliseconds — confirm.
        var size = new FileInfo(path).Length / (double) 1024 / 1024;
        Debug.Log($"MB/s=[{size / (PerformanceTest.Active.SampleGroups.First().Median / 1000)}]");
    }
    finally
    {
        // Always remove the temp file, even if measurement throws.
        File.Delete(path);
    }
}