unsafe bool FillBuffer()
        {
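            // The parser hasn't consumed everything the tokenizer produced (or a node is still open), so don't load more input yet.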
            if (m_Parser.TokenNextIndex < m_Tokenizer.TokenNextIndex || m_Parser.NodeType != NodeType.None)
            {
                return false;
            }

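            // Pull the next block of characters from the underlying reader.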
            m_Block = m_BlockReader.GetNextBlock();

            if (null == m_Block || m_Block.Length == 0)
            {
                m_Block = null;
                return false;
            }

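            // Drop fully processed tokens, then re-seek the parser and binary writer,
            // remapping the writer's parent through the tokenizer's discard remap table.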
            m_Tokenizer.DiscardCompleted();
            m_Parser.Seek(m_Tokenizer.TokenNextIndex, m_Tokenizer.TokenParentIndex);
            m_BinaryWriter.Seek(m_Tokenizer.TokenNextIndex, m_BinaryWriter.TokenParentIndex != -1
                                    ? m_Tokenizer.DiscardRemap[m_BinaryWriter.TokenParentIndex]
                                    : -1);

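            // Pin the new block and tokenize it in place.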
            fixed (char* ptr = m_Block.Buffer)
            {
                m_Tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = m_Block.Buffer.Length }, 0, m_Block.Length);
            }

            return true;
        }
Code Example #2
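A performance test that tokenizes the mock-entity JSON once, then measures only PackedBinaryWriter.Write across 100 samples and logs the throughput in MB/s.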
        public unsafe void PerformanceTest_PackedBinaryWriter_Write_MockEntities(int count)
        {
            var json = JsonTestData.GetMockEntities(count);

            fixed (char* ptr = json)
            {
                m_Tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
            }

            Measure.Method(() =>
            {
                using (var stream = new PackedBinaryStream(Allocator.TempJob))
                using (var writer = new PackedBinaryWriter(stream, m_Tokenizer))
                {
                    fixed (char* ptr = json)
                    {
                        writer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, m_Tokenizer.TokenNextIndex);
                    }
                }
            })
            .Definition("PackedBinaryWriterWrite")
            .WarmupCount(1)
            .MeasurementCount(100)
            .Run();

            PerformanceTest.Active.CalculateStatisticalValues();

            var size = json.Length / (double)1024 / 1024;

            Debug.Log($"MB/s=[{size / (PerformanceTest.Active.SampleGroups.First().Median / 1000)}]");
        }
Code Example #3
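A performance test for JsonTokenizer.Write with standard validation enabled; each sample constructs a fresh tokenizer sized by initialTokenBuffer, with token-buffer resizing allowed.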
        public unsafe void PerformanceTest_JsonTokenizer_WriteWithStandardValidation_MockEntities(int count, int initialTokenBuffer)
        {
            var json = JsonTestData.GetMockEntities(count);

            Measure.Method(() =>
            {
                fixed (char* ptr = json)
                {
                    using (var tokenizer = new JsonTokenizer(initialTokenBuffer, JsonValidationType.Standard)
                    {
                        AllowTokenBufferResize = true
                    })
                    {
                        tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
                    }
                }
            })
            .Definition("JsonTokenizerWrite")
            .WarmupCount(1)
            .MeasurementCount(100)
            .Run();

            PerformanceTest.Active.CalculateStatisticalValues();

            var size = json.Length / (double)1024 / 1024;

            Debug.Log($"MB/s=[{size / (PerformanceTest.Active.SampleGroups.First().Median / 1000)}]");
        }
Code Example #4
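A performance test for NodeParser.Step: the input is tokenized once up front, and each sample walks the full token stream with a fresh parser.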
        public unsafe void PerformanceTest_NodeParser_Step_MockEntities(int count)
        {
            var json = JsonTestData.GetMockEntities(count);

            fixed (char* ptr = json)
            {
                m_Tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
            }

            Measure.Method(() =>
            {
                using (var parser = new NodeParser(m_Tokenizer))
                {
                    parser.Step(NodeType.None);
                }
            })
            .Definition("NodeParserStep")
            .WarmupCount(1)
            .MeasurementCount(100)
            .Run();

            PerformanceTest.Active.CalculateStatisticalValues();

            var size = json.Length / (double)1024 / 1024;

            Debug.Log($"MB/s=[{size / (PerformanceTest.Active.SampleGroups.First().Median / 1000)}]");
        }
Code Example #5
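A small helper that pins a managed string and feeds it to a tokenizer; this pin-then-write pattern recurs throughout the examples below.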
 static void Write(JsonTokenizer tokenizer, string json)
 {
     unsafe
     {
         fixed (char* ptr = json)
         {
             tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
         }
     }
 }
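
A minimal usage sketch for the helper above (hypothetical caller; the parameterless JsonTokenizer constructor appears in the later examples):

 static void WriteExample()
 {
     using (var tokenizer = new JsonTokenizer())
     {
         // Pins the string for the duration of the call, then tokenizes it.
         Write(tokenizer, @"{""enabled"": true}");
     }
 }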
Code Example #6
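Asserts that tokenizing invalid JSON throws InvalidJsonException; the json inputs presumably come from test-case attributes not shown here.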
 public unsafe void JsonTokenizer_Write_InvalidJson(string json)
 {
     using (var tokenizer = new JsonTokenizer(4))
     {
         Assert.Throws<InvalidJsonException>(() =>
         {
             fixed (char* ptr = json)
             {
                 tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
             }
         });
     }
 }
Code Example #7
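Writes more tokens than the initial capacity of 4 and asserts that nothing is thrown, since buffer resizing is evidently allowed by default; contrast with Example #18.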
        public unsafe void JsonTokenizer_Write_TokenBufferOverflow_DoesNotThrow()
        {
            const string json = @"{""foo"": 123, ""bar"": 456}";

            using (var tokenizer = new JsonTokenizer(4))
            {
                Assert.DoesNotThrow(() =>
                {
                    fixed (char* ptr = json)
                    {
                        tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
                    }
                });
            }
        }
Code Example #8
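Asserts that an empty array tokenizes to a single root Array token with no parent and a resolved End position.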
        public unsafe void JsonTokenizer_Write_EmptyArray(string json)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            {
                fixed (char* ptr = json)
                {
                    tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
                }

                Assert.AreEqual(1, tokenizer.TokenNextIndex);
                Assert.AreEqual(TokenType.Array, tokenizer.Tokens[0].Type);
                Assert.AreEqual(-1, tokenizer.Tokens[0].Parent);
                Assert.AreNotEqual(-1, tokenizer.Tokens[0].End);
            }
        }
Code Example #9
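Tokenizes the input, then immediately packs the resulting tokens into a PackedBinaryStream through a PackedBinaryWriter.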
        public unsafe void PackedBinaryWriter_Write(string json)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            using (var stream = new PackedBinaryStream(Allocator.TempJob))
            using (var writer = new PackedBinaryWriter(stream, tokenizer, Allocator.TempJob))
            {
                fixed (char* ptr = json)
                {
                    var buffer = new UnsafeBuffer<char>(ptr, json.Length);

                    tokenizer.Write(buffer, 0, json.Length);
                    writer.Write(buffer, tokenizer.TokenNextIndex);
                }
            }
        }
Code Example #10
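Verifies the token count before and after DiscardCompleted, which drops tokens that have been fully closed.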
        public unsafe void JsonTokenizer_DiscardCompleted(string json, int expectedCountBeforeDiscard, int expectedCountAfterDiscard)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            {
                fixed (char* ptr = json)
                {
                    tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
                }

                Assert.AreEqual(expectedCountBeforeDiscard, tokenizer.TokenNextIndex);

                tokenizer.DiscardCompleted();

                Assert.AreEqual(expectedCountAfterDiscard, tokenizer.TokenNextIndex);
            }
        }
Code Example #11
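Asserts that an object with one member yields three tokens: the Object, the member's String key, and its Primitive value.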
        public unsafe void JsonTokenizer_Write_ObjectWithMember(string json)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            {
                fixed (char* ptr = json)
                {
                    tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
                }

                Assert.AreEqual(3, tokenizer.TokenNextIndex);
                Assert.AreEqual(TokenType.Object, tokenizer.Tokens[0].Type);
                Assert.AreNotEqual(-1, tokenizer.Tokens[0].End);
                Assert.AreEqual(TokenType.String, tokenizer.Tokens[1].Type);
                Assert.AreEqual(TokenType.Primitive, tokenizer.Tokens[2].Type);
            }
        }
Code Example #12
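Asserts that comments in the input surface as TokenType.Comment tokens alongside the regular ones.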
        public unsafe void JsonTokenizer_Write_Comments(string json)
        {
            using (var tokenizer = new JsonTokenizer(4))
            {
                fixed (char* ptr = json)
                {
                    tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);

                    Assert.That(tokenizer.TokenNextIndex, Is.EqualTo(4));
                    Assert.That(tokenizer.Tokens[0].Type, Is.EqualTo(TokenType.Object));
                    Assert.That(tokenizer.Tokens[1].Type, Is.EqualTo(TokenType.Primitive));
                    Assert.That(tokenizer.Tokens[2].Type, Is.EqualTo(TokenType.Comment));
                    Assert.That(tokenizer.Tokens[3].Type, Is.EqualTo(TokenType.Primitive));
                }
            }
        }
Code Example #13
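Feeds a number split across several writes (the input is '|'-delimited) and checks the continuation chain: only the first chunk has a Start, only the last has an End, and each chunk is parented to the previous token.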
        public unsafe void JsonTokenizer_Write_PartialNumber(string input)
        {
            var parts = input.Split('|');

            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            {
                foreach (var json in parts)
                {
                    Assert.IsNotNull(json);

                    fixed (char* ptr = json)
                    {
                        tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
                    }
                }

                Assert.AreEqual(parts.Length + 2, tokenizer.TokenNextIndex);
                Assert.AreEqual(TokenType.Object, tokenizer.Tokens[0].Type);
                Assert.AreEqual(TokenType.String, tokenizer.Tokens[1].Type);

                for (var i = 0; i < parts.Length; i++)
                {
                    var token = tokenizer.Tokens[i + 2];

                    Assert.AreEqual(i + 1, token.Parent);
                    Assert.AreEqual(TokenType.Primitive, token.Type);

                    if (i == 0)
                    {
                        Assert.AreNotEqual(-1, token.Start);
                    }
                    else
                    {
                        Assert.AreEqual(-1, token.Start);
                    }

                    if (i == parts.Length - 1)
                    {
                        Assert.AreNotEqual(-1, token.End);
                    }
                    else
                    {
                        Assert.AreEqual(-1, token.End);
                    }
                }
            }
        }
Code Example #14
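The string analogue of Example #13: a string value split across writes yields a chain of String tokens with the same Start/End continuation rules.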
        public unsafe void JsonTokenizer_Write_PartialString(params object[] parts)
        {
            using (var tokenizer = new JsonTokenizer())
            {
                foreach (string json in parts)
                {
                    Assert.IsNotNull(json);

                    fixed (char* ptr = json)
                    {
                        tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
                    }
                }

                Assert.AreEqual(parts.Length + 2, tokenizer.TokenNextIndex);
                Assert.AreEqual(TokenType.Object, tokenizer.Tokens[0].Type);
                Assert.AreEqual(TokenType.String, tokenizer.Tokens[1].Type);

                for (var i = 0; i < parts.Length; i++)
                {
                    var token = tokenizer.Tokens[i + 2];

                    Assert.AreEqual(i + 1, token.Parent);
                    Assert.AreEqual(TokenType.String, token.Type);

                    if (i == 0)
                    {
                        Assert.AreNotEqual(-1, token.Start);
                    }
                    else
                    {
                        Assert.AreEqual(-1, token.Start);
                    }

                    if (i == parts.Length - 1)
                    {
                        Assert.AreNotEqual(-1, token.End);
                    }
                    else
                    {
                        Assert.AreEqual(-1, token.End);
                    }
                }
            }
        }
Code Example #15
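Asserts that an empty object tokenizes to a single root Object token, mirroring the empty-array case in Example #8.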
        public unsafe void JsonTokenizer_Write_EmptyObject(string json)
        {
            fixed (char* ptr = json)
            {
                using (var tokenizer = new JsonTokenizer())
                {
                    tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);

                    Assert.AreEqual(1, tokenizer.TokenNextIndex);
                    Assert.AreEqual(TokenType.Object, tokenizer.Tokens[0].Type);
                    Assert.AreEqual(-1, tokenizer.Tokens[0].Parent);
                    Assert.AreNotEqual(-1, tokenizer.Tokens[0].End);
                }
            }
        }
Code Example #16
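A variant of Example #6 with token-buffer resizing explicitly enabled; invalid input should still throw InvalidJsonException.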
 public unsafe void JsonTokenizer_Write_InvalidJson(string json)
 {
     using (var tokenizer = new JsonTokenizer(4)
     {
         AllowTokenBufferResize = true
     })
     {
         Assert.Throws<InvalidJsonException>(() =>
         {
             fixed (char* ptr = json)
             {
                 tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
             }
         });
         });
     }
 }
Code Example #17
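A '|'-delimited variant of Example #10: the input is written in parts before the token counts around DiscardCompleted are compared.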
        public unsafe void JsonTokenizer_DiscardCompleted_Parts(string input, int expectedCountBeforeDiscard, int expectedCountAfterDiscard)
        {
            var parts = input.Split('|');

            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            {
                foreach (var json in parts)
                {
                    fixed (char* ptr = json)
                    {
                        tokenizer.Write(new UnsafeBuffer<char>(ptr, json.Length), 0, json.Length);
                    }
                }

                Assert.AreEqual(expectedCountBeforeDiscard, tokenizer.TokenNextIndex);
                tokenizer.DiscardCompleted();
                Assert.AreEqual(expectedCountAfterDiscard, tokenizer.TokenNextIndex);
            }
        }
Code Example #18
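The counterpart to Example #7: with AllowTokenBufferResize disabled, exceeding the initial capacity of 4 tokens throws BufferOverflowException.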
        public unsafe void JsonTokenizer_Write_TokenBufferOverflow_Throws()
        {
            const string json = @"{""foo"": 123, ""bar"": 456}";

            using (var tokenizer = new JsonTokenizer(4)
            {
                AllowTokenBufferResize = false
            })
            {
                Assert.Throws<BufferOverflowException>(() =>
                {
                    fixed (char* ptr = json)
                    {
                        tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
                    }
                });
            }
        }
Code Example #19
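Interleaves tokenizer and binary-writer writes over a key that arrives in several chunks, then discards completed data from the stream.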
        public unsafe void PackedBinaryWriter_Write_PartialKey(params object[] parts)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            using (var stream = new PackedBinaryStream(Allocator.TempJob))
            using (var writer = new PackedBinaryWriter(stream, tokenizer))
            {
                foreach (string json in parts)
                {
                    fixed (char* ptr = json)
                    {
                        var buffer = new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length };

                        tokenizer.Write(buffer, 0, json.Length);
                        writer.Write(buffer, tokenizer.TokenNextIndex);
                    }
                }

                stream.DiscardCompleted();
            }
        }
Code Example #20
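The same scenario as Example #19, but driven by a '|'-delimited input string and using an explicit allocator for the writer.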
        public unsafe void PackedBinaryWriter_Write_PartialKey(string input)
        {
            var parts = input.Split('|');

            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            using (var stream = new PackedBinaryStream(Allocator.TempJob))
            using (var writer = new PackedBinaryWriter(stream, tokenizer, Allocator.TempJob))
            {
                foreach (var json in parts)
                {
                    fixed (char* ptr = json)
                    {
                        var buffer = new UnsafeBuffer<char>(ptr, json.Length);

                        tokenizer.Write(buffer, 0, json.Length);
                        writer.Write(buffer, tokenizer.TokenNextIndex);
                    }
                }

                stream.DiscardCompleted();
            }
        }
Code Example #21
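A variant of Example #17 that also prints the token state and parent index around DiscardCompleted.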
        public unsafe void JsonTokenizer_DiscardCompleted_Parts(string[] parts, int expectedCountBeforeDiscard, int expectedCountAfterDiscard)
        {
            using (var tokenizer = new JsonTokenizer())
            {
                foreach (var json in parts)
                {
                    fixed (char* ptr = json)
                    {
                        tokenizer.Write(new UnsafeBuffer<char> { Buffer = ptr, Length = json.Length }, 0, json.Length);
                    }
                }

                Print(tokenizer);
                Debug.Log(tokenizer.TokenParentIndex);
                Assert.AreEqual(expectedCountBeforeDiscard, tokenizer.TokenNextIndex);
                tokenizer.DiscardCompleted();
                Print(tokenizer);
                Debug.Log(tokenizer.TokenParentIndex);
                Assert.AreEqual(expectedCountAfterDiscard, tokenizer.TokenNextIndex);
            }
        }