// Pulls the next block of characters from m_BlockReader into the tokenizer.
// Returns true when a new block was written into the tokenizer, false when the
// parser has not yet consumed all tokenized input or when no more input exists.
unsafe bool FillBuffer()
        {
            // Refuse to fetch more input while the parser lags behind the tokenizer
            // or is mid-node. NOTE(review): presumably both conditions must be clear
            // before DiscardCompleted() below is safe — confirm against parser impl.
            if (m_Parser.TokenNextIndex < m_Tokenizer.TokenNextIndex || m_Parser.NodeType != NodeType.None)
            {
                return(false);
            }

            m_Block = m_BlockReader.GetNextBlock();

            // A null or empty block signals end of input; clear the reference so we
            // don't re-process a stale block.
            if (null == m_Block || m_Block.Length == 0)
            {
                m_Block = null;
                return(false);
            }

            // Drop fully-consumed tokens, then rebase the parser and binary writer
            // onto the tokenizer's compacted indices. The writer's parent index is
            // remapped through DiscardRemap (or left as -1 when it had no parent).
            m_Tokenizer.DiscardCompleted();
            m_Parser.Seek(m_Tokenizer.TokenNextIndex, m_Tokenizer.TokenParentIndex);
            m_BinaryWriter.Seek(m_Tokenizer.TokenNextIndex, m_BinaryWriter.TokenParentIndex != -1
                                    ? m_Tokenizer.DiscardRemap[m_BinaryWriter.TokenParentIndex]
                                    : -1);

            // Pin the managed char buffer and feed the block's valid range
            // [0, m_Block.Length) to the tokenizer.
            fixed(char *ptr = m_Block.Buffer)
            {
                m_Tokenizer.Write(new UnsafeBuffer <char> {
                    Buffer = ptr, Length = m_Block.Buffer.Length
                }, 0, m_Block.Length);
            }

            return(true);
        }
        /// <summary>
        /// Tokenizes <paramref name="json"/> in a single write, then checks the
        /// token count both before and after discarding completed tokens.
        /// </summary>
        public unsafe void JsonTokenizer_DiscardCompleted(string json, int expectedCountBeforeDiscard, int expectedCountAfterDiscard)
        {
            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            {
                // Pin the string so its characters can be handed to the tokenizer
                // as an unmanaged buffer.
                fixed(char *chars = json)
                {
                    tokenizer.Write(new UnsafeBuffer <char>(chars, json.Length), 0, json.Length);
                }

                Assert.AreEqual(expectedCountBeforeDiscard, tokenizer.TokenNextIndex);
                tokenizer.DiscardCompleted();
                Assert.AreEqual(expectedCountAfterDiscard, tokenizer.TokenNextIndex);
            }
        }
        /// <summary>
        /// Splits <paramref name="input"/> on '|' and feeds each piece to the
        /// tokenizer as a separate write, then checks the token count before and
        /// after discarding completed tokens.
        /// </summary>
        public unsafe void JsonTokenizer_DiscardCompleted_Parts(string input, int expectedCountBeforeDiscard, int expectedCountAfterDiscard)
        {
            var segments = input.Split('|');

            using (var tokenizer = new JsonTokenizer(Allocator.TempJob))
            {
                // Each segment is pinned and written independently to exercise
                // incremental tokenization across multiple buffers.
                foreach (var segment in segments)
                {
                    fixed(char *chars = segment)
                    {
                        tokenizer.Write(new UnsafeBuffer <char>(chars, segment.Length), 0, segment.Length);
                    }
                }

                Assert.AreEqual(expectedCountBeforeDiscard, tokenizer.TokenNextIndex);
                tokenizer.DiscardCompleted();
                Assert.AreEqual(expectedCountAfterDiscard, tokenizer.TokenNextIndex);
            }
        }
// Ejemplo n.º 4 (0) — stray sample-site artifact lines; kept as a comment so the file compiles.
        /// <summary>
        /// Feeds each element of <paramref name="parts"/> to the tokenizer as a
        /// separate write, then asserts the token count before and after
        /// discarding completed tokens.
        /// </summary>
        public unsafe void JsonTokenizer_DiscardCompleted_Parts(string[] parts, int expectedCountBeforeDiscard, int expectedCountAfterDiscard)
        {
            using (var tokenizer = new JsonTokenizer())
            {
                foreach (var json in parts)
                {
                    // Pin each part so it can be passed as an unmanaged buffer.
                    fixed(char *ptr = json)
                    {
                        tokenizer.Write(new UnsafeBuffer <char> {
                            Buffer = ptr, Length = json.Length
                        }, 0, json.Length);
                    }
                }

                // Assertions re-enabled: while commented out, the two
                // expectedCount parameters were unused and this method verified
                // nothing (debug Print/Debug.Log scaffolding also removed).
                Assert.AreEqual(expectedCountBeforeDiscard, tokenizer.TokenNextIndex);
                tokenizer.DiscardCompleted();
                Assert.AreEqual(expectedCountAfterDiscard, tokenizer.TokenNextIndex);
            }
        }