/// <inheritdoc />
/// <summary>
/// Writes <see cref="T:Unity.Serialization.Token" /> objects to the internal buffer.
/// </summary>
/// <param name="buffer">A character array containing the input json data to tokenize.</param>
/// <param name="start">The index of the buffer at which to begin reading.</param>
/// <param name="count">The maximum number of characters to read.</param>
/// <returns>The number of characters that have been read.</returns>
public int Write(UnsafeBuffer<char> buffer, int start, int count)
{
    if (start + count > buffer.Length)
    {
        throw new ArgumentOutOfRangeException();
    }

    var position = start;

    for (;;)
    {
        var output = new TokenizeJobOutput();

        // Tokenize the given character range on a background job.
        var handle = new TokenizeJob
        {
            Output = &output,
            CharBuffer = (ushort*) buffer.Buffer,
            CharBufferLength = start + count,
            CharBufferPosition = position,
            PrevChar = m_Data->PrevChar,
            CommentType = m_Data->CommentType,
            Tokens = m_Data->JsonTokens,
            TokensLength = m_Data->BufferSize,
            TokensNextIndex = m_Data->TokenNextIndex,
            TokenParentIndex = m_Data->TokenParentIndex,
            Label = m_Label
        }.Schedule();

        // Schedule the configured validation pass, if any, to run alongside tokenization.
        var validation = default(JobHandle);

        switch (m_Data->ValidationType)
        {
            case JsonValidationType.Standard:
                validation = m_Data->StandardValidator.ScheduleValidation(buffer, position, count);
                break;
            case JsonValidationType.Simple:
                validation = m_Data->SimpleValidator.ScheduleValidation(buffer, position, count);
                break;
        }

        JobHandle.CombineDependencies(handle, validation).Complete();

        var result = default(JsonValidationResult);

        switch (m_Data->ValidationType)
        {
            case JsonValidationType.Standard:
                result = m_Data->StandardValidator.GetResult();
                break;
            case JsonValidationType.Simple:
                result = m_Data->SimpleValidator.GetResult();
                break;
        }

        if (!result.IsValid() && result.ActualType != JsonType.EOF)
        {
            throw new InvalidJsonException(result.ToString())
            {
                Line = result.LineCount,
                Character = result.CharCount
            };
        }

        position = output.BufferPosition;

        m_Data->JsonTokens = output.Tokens;
        m_Data->TokenNextIndex = output.TokenNextIndex;
        m_Data->TokenParentIndex = output.TokenParentIndex;
        m_Data->PrevChar = output.PrevChar;
        m_Data->CommentType = output.CommentType;

        // The tokenize job may have grown the token buffer; keep the discard remap in sync.
        if (output.TokensLength != m_Data->BufferSize)
        {
            m_Data->DiscardRemap = NativeArrayUtility.Resize(m_Data->DiscardRemap, m_Data->BufferSize, output.TokensLength, 4, m_Label);
            m_Data->BufferSize = output.TokensLength;
        }

        if (output.Result == k_ResultInvalidInput)
        {
            // No validation pass was performed.
            // The tokenizer has failed with something that was structurally invalid.
            throw new InvalidJsonException($"Input json was structurally invalid. Try with {nameof(JsonValidationType)}=[Standard or Simple]")
            {
                Line = -1,
                Character = -1
            };
        }

        return position - start;
    }
}
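// Usage sketch (illustrative, not part of the original source): feeding a complete in-memory
// JSON string to the overload above in a single call. The enclosing tokenizer type name
// (`JsonTokenizer`) and the UnsafeBuffer<char>(char*, int) constructor are assumptions here;
// both are internal, version-dependent details of Unity.Serialization.
internal static unsafe int TokenizeAll(JsonTokenizer tokenizer, string json)
{
    fixed (char* ptr = json)
    {
        // Wrap the pinned string and hand the whole character range to the tokenizer.
        var buffer = new UnsafeBuffer<char>(ptr, json.Length); // assumed constructor
        return tokenizer.Write(buffer, 0, json.Length);
    }
}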
/// <inheritdoc />
/// <summary>
/// Writes <see cref="T:Unity.Serialization.Token" /> objects to the internal buffer.
/// </summary>
/// <param name="buffer">A character array containing the input json data to tokenize.</param>
/// <param name="start">The index of the buffer at which to begin reading.</param>
/// <param name="count">The maximum number of characters to read.</param>
/// <returns>The number of characters that have been read.</returns>
public unsafe int Write(UnsafeBuffer<char> buffer, int start, int count)
{
    if (start + count > buffer.Length)
    {
        throw new ArgumentOutOfRangeException();
    }

    var position = start;

    // Kick off validation for the entire character range; it runs alongside tokenization.
    var validation = m_Validator.ValidateAsync(buffer, position, count);

    for (;;)
    {
        var output = new TokenizeJobOutput();

        var job = new TokenizeJob
        {
            Output = &output,
            CharBuffer = (ushort*) buffer.Buffer,
            CharBufferLength = start + count,
            CharBufferPosition = position,
            PrevChar = m_PrevChar,
            Tokens = (Token*) m_JsonTokens.GetUnsafePtr(),
            TokensLength = m_JsonTokens.Length,
            TokensNextIndex = m_TokenNextIndex,
            TokenParentIndex = m_TokenParentIndex
        };

        if (m_ValidationType == JsonValidationType.None)
        {
            job.Run();
        }
        else
        {
            job.Schedule().Complete();
        }

        position = output.BufferPosition;

        m_TokenNextIndex = output.TokenNextIndex;
        m_TokenParentIndex = output.TokenParentIndex;
        m_PrevChar = output.PrevChar;

        if (output.Result == k_ResultTokenBufferOverflow)
        {
            if (!AllowTokenBufferResize)
            {
                throw new BufferOverflowException($"Token buffer overflow TokenNextIndex=[{output.TokenNextIndex}]. Use a larger buffer or set AllowTokenBufferResize=[True]");
            }

            // Double the token buffers and resume tokenization from the current position.
            m_JsonTokens = NativeArrayUtility.Resize(m_JsonTokens, m_JsonTokens.Length * 2, m_Label, NativeArrayOptions.UninitializedMemory);
            m_DiscardRemap = NativeArrayUtility.Resize(m_DiscardRemap, m_DiscardRemap.Length * 2, m_Label, NativeArrayOptions.UninitializedMemory);
            continue;
        }

        validation.Complete();

        var result = m_Validator.GetResult();

        if ((!result.IsValid() && result.ActualType != JsonType.EOF) || output.Result == k_ResultInvalidInput)
        {
            if (m_ValidationType == JsonValidationType.None)
            {
                // No validation pass was performed.
                // The tokenizer has failed with something that was structurally invalid.
                throw new InvalidJsonException($"Input json was structurally invalid. Try with {nameof(JsonValidationType)}=[Standard or Simple]")
                {
                    Line = -1,
                    Character = -1
                };
            }

            throw new InvalidJsonException(result.ToString())
            {
                Line = result.LineCount,
                Character = result.CharCount
            };
        }

        return position - start;
    }
}
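// Usage sketch (illustrative, not part of the original source): the overload above grows its
// token buffer and retries when a TokenizeJob reports k_ResultTokenBufferOverflow, but only if
// AllowTokenBufferResize is enabled. Whether that flag is settable from user code, and the
// JsonTokenizer type name, are assumptions; adjust to the actual API of the package version in use.
internal static unsafe int TokenizeWithGrowableTokenBuffer(JsonTokenizer tokenizer, UnsafeBuffer<char> buffer)
{
    tokenizer.AllowTokenBufferResize = true; // assumed to be publicly settable

    // On overflow the token and discard-remap buffers are doubled and tokenization resumes
    // from the current position, so a single Write call still consumes the full range.
    return tokenizer.Write(buffer, 0, buffer.Length);
}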
/// <inheritdoc />
/// <summary>
/// Writes <see cref="T:Unity.Serialization.Token" /> objects to the internal buffer.
/// </summary>
/// <param name="buffer">A character array containing the input json data to tokenize.</param>
/// <param name="start">The index of the buffer at which to begin reading.</param>
/// <param name="count">The maximum number of characters to read.</param>
/// <param name="isFinalBlock">A value indicating if this is the final block of characters from a stream. This will trigger an error for any unclosed scopes.</param>
/// <returns>The number of characters that have been read.</returns>
public int Write(UnsafeBuffer<char> buffer, int start, int count, bool isFinalBlock = false)
{
    if (start + count > buffer.Length)
    {
        throw new ArgumentOutOfRangeException();
    }

    var position = start;

    for (;;)
    {
        var output = new TokenizeJobOutput();

        // Tokenize the given character range on a background job.
        var handle = new TokenizeJob
        {
            Output = &output,
            CharBuffer = (ushort*) buffer.Buffer,
            CharBufferLength = start + count,
            CharBufferPosition = position,
            PrevChar = m_Data->PrevChar,
            CommentType = m_Data->CommentType,
            Tokens = m_Data->JsonTokens,
            TokensLength = m_Data->BufferSize,
            TokensNextIndex = m_Data->TokenNextIndex,
            TokenParentIndex = m_Data->TokenParentIndex,
            Label = m_Label
        }.Schedule();

        // Schedule the configured validation pass, if any, to run alongside tokenization.
        var validation = default(JobHandle);

        switch (m_Data->ValidationType)
        {
            case JsonValidationType.Standard:
                validation = m_Data->StandardValidator.ScheduleValidation(buffer, position, count);
                break;
            case JsonValidationType.Simple:
                validation = m_Data->SimpleValidator.ScheduleValidation(buffer, position, count);
                break;
        }

        JobHandle.CombineDependencies(handle, validation).Complete();

        var result = default(JsonValidationResult);

        switch (m_Data->ValidationType)
        {
            case JsonValidationType.Standard:
                result = m_Data->StandardValidator.GetResult();
                break;
            case JsonValidationType.Simple:
                result = m_Data->SimpleValidator.GetResult();
                break;
        }

        position = output.BufferPosition;

        m_Data->JsonTokens = output.Tokens;
        m_Data->TokenNextIndex = output.TokenNextIndex;
        m_Data->TokenParentIndex = output.TokenParentIndex;
        m_Data->PrevChar = output.PrevChar;
        m_Data->CommentType = output.CommentType;

        if (!result.IsValid())
        {
            // The JSON is considered invalid at this point. However, we have a few special cases to consider.
            if (result.ActualType == JsonType.EOF && !isFinalBlock)
            {
                // The last received token was an end of stream token but we are still waiting on more characters.
                // We can safely ignore this error for now.
            }
            else if (TokenNextIndex == 1 && Tokens[0].Type == TokenType.Primitive)
            {
                // This is a single primitive value. We can deserialize safely and accept this as valid.
            }
            else
            {
                throw new InvalidJsonException(result.ToString())
                {
                    Line = result.LineCount,
                    Character = result.CharCount
                };
            }
        }

        // The tokenize job may have grown the token buffer; keep the discard remap in sync.
        if (output.TokensLength != m_Data->BufferSize)
        {
            m_Data->DiscardRemap = NativeArrayUtility.Resize(m_Data->DiscardRemap, m_Data->BufferSize, output.TokensLength, 4, m_Label);
            m_Data->BufferSize = output.TokensLength;
        }

        if (output.Result == k_ResultInvalidInput)
        {
            // No validation pass was performed.
            // The tokenizer has failed with something that was structurally invalid.
            throw new InvalidJsonException($"Input json was structurally invalid. Try with {nameof(JsonValidationType)}=[Standard or Simple]")
            {
                Line = -1,
                Character = -1
            };
        }

        return position - start;
    }
}
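// Usage sketch (illustrative, not part of the original source): driving the streaming overload
// above one block at a time. Earlier blocks tolerate an EOF validation result, while the block
// passed with isFinalBlock: true reports any unclosed scopes as invalid JSON. The JsonTokenizer
// type name and the UnsafeBuffer<char>(char*, int) constructor are assumptions.
internal static unsafe void TokenizeInBlocks(JsonTokenizer tokenizer, IReadOnlyList<char[]> blocks)
{
    for (var i = 0; i < blocks.Count; i++)
    {
        var block = blocks[i];

        fixed (char* ptr = block)
        {
            var buffer = new UnsafeBuffer<char>(ptr, block.Length); // assumed constructor

            // Only the last block enforces that all objects and arrays have been closed.
            tokenizer.Write(buffer, 0, block.Length, isFinalBlock: i == blocks.Count - 1);
        }
    }
}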