/// <summary>
/// Maps a token index from the input (parser) stream to the corresponding token index in the binary output stream.
/// </summary>
/// <param name="node">Token index in the input stream, or -1 for "no token".</param>
/// <param name="inputTokenStart">Index in the input stream at which newly written tokens begin.</param>
/// <param name="binaryTokenStart">Index in the binary stream corresponding to <paramref name="inputTokenStart"/>.</param>
/// <returns>The matching token index in the binary stream, or -1 when <paramref name="node"/> is -1.</returns>
unsafe int GetViewIndex(int node, int inputTokenStart, int binaryTokenStart)
{
    if (node == -1)
    {
        return -1;
    }

    var data = m_BinaryStream.GetUnsafeData();

    if (node >= inputTokenStart)
    {
        // This is a newly written token.
        // Since we know tokens are written in order; we can simply compute the offset.
        var offset = m_Parser.TokenNextIndex - node;
        return data->TokenNextIndex - offset;
    }

    // This is a previously written token.
    // Since we know we can never discard an incomplete token.
    // We must walk up the tree the same number of times for both streams to find the correct token.
    var binaryIndex = binaryTokenStart;

    // Re-use the data pointer fetched above rather than calling GetUnsafeData() a second time.
    var binaryTokens = data->Tokens;

    while (inputTokenStart != node)
    {
        inputTokenStart = m_Tokenizer.Tokens[inputTokenStart].Parent;
        binaryIndex = binaryTokens[binaryIndex].Parent;
    }

    return binaryIndex;
}
/// <summary>
/// Creates a <see cref="SerializedValueView"/> over the token stored at the given stream index.
/// </summary>
/// <param name="index">Zero-based token index into the stream.</param>
/// <returns>A view bound to the token's handle at its current data version.</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when <paramref name="index"/> is negative or past the last written token.</exception>
internal SerializedValueView GetView(int index)
{
    var data = m_Stream.GetUnsafeData();

    // A single unsigned comparison rejects both negative indices and indices >= TokenNextIndex.
    if ((uint)index >= (uint)data->TokenNextIndex)
    {
        throw new IndexOutOfRangeException();
    }

    var handleIndex = data->Tokens[index].HandleIndex;
    var version = data->Handles[handleIndex].DataVersion;

    return new SerializedValueView(m_Stream, new Handle { Index = handleIndex, Version = version });
}
/// <summary>
/// Builds a versioned <see cref="Handle"/> for the token at <paramref name="tokenIndex"/>.
/// </summary>
/// <param name="tokenIndex">Index of the token whose handle is requested.</param>
/// <returns>A handle carrying the token's handle index and its current data version.</returns>
internal Handle GetHandle(int tokenIndex)
{
    // Cache the handle index so the token array is only indexed once.
    var handleIndex = m_Tokens[tokenIndex].HandleIndex;
    var version = m_PackedBinaryStream.GetUnsafeData()->Handles[handleIndex].DataVersion;

    return new Handle { Index = handleIndex, Version = version };
}
/// <summary>
/// Wraps the given <see cref="PackedBinaryStream"/> and caches raw pointers to its token and buffer storage.
/// </summary>
/// <param name="stream">The packed binary stream to expose through this unsafe wrapper.</param>
public UnsafePackedBinaryStream(PackedBinaryStream stream)
{
    m_PackedBinaryStream = stream;

    // Fetch the unsafe data pointer once and read both fields from it.
    var data = stream.GetUnsafeData();
    m_Tokens = data->Tokens;
    m_Buffer = data->Buffer;
}