private static JsonDocument CreateForLiteral(JsonTokenType tokenType)
{
    // Returns the cached singleton document for a JSON literal token
    // (false / true / null), creating and caching it on first use.
    if (tokenType == JsonTokenType.False)
    {
        return s_falseLiteral ??= CreateLiteralDocument(JsonConstants.FalseValue.ToArray());
    }

    if (tokenType == JsonTokenType.True)
    {
        return s_trueLiteral ??= CreateLiteralDocument(JsonConstants.TrueValue.ToArray());
    }

    Debug.Assert(tokenType == JsonTokenType.Null);
    return s_nullLiteral ??= CreateLiteralDocument(JsonConstants.NullValue.ToArray());

    JsonDocument CreateLiteralDocument(byte[] utf8Json)
    {
        // A literal is a single token, so the metadata database gets exactly one row.
        MetadataDb database = MetadataDb.CreateLocked(utf8Json.Length);
        database.Append(tokenType, startLocation: 0, utf8Json.Length);
        return new JsonDocument(utf8Json, database);
    }
}
private static JsonDocument Parse(
    ReadOnlyMemory<byte> utf8Json,
    JsonReaderOptions readerOptions,
    byte[]? extraRentedArrayPoolBytes = null,
    PooledByteBufferWriter? extraPooledByteBufferWriter = null)
{
    // Build the metadata index for the payload into a rented buffer; ownership of
    // the database (and any extra rented resources) transfers to the document.
    MetadataDb database = MetadataDb.CreateRented(utf8Json.Length, convertToAlloc: false);
    var rowStack = new StackRowStack(JsonDocumentOptions.DefaultMaxDepth * StackRow.Size);

    try
    {
        Parse(utf8Json.Span, readerOptions, ref database, ref rowStack);
    }
    catch
    {
        // Parsing failed: return the rented metadata buffer before propagating.
        database.Dispose();
        throw;
    }
    finally
    {
        rowStack.Dispose();
    }

    return new JsonDocument(utf8Json, database, extraRentedArrayPoolBytes, extraPooledByteBufferWriter);
}
private static NbtDocument Parse(
    ReadOnlyMemory<byte> data,
    NbtOptions options,
    byte[]? extraRentedBytes,
    out int bytesConsumed)
{
    // Build the metadata index for the NBT payload; the database is handed
    // to the resulting document, which becomes responsible for disposing it.
    var database = new MetadataDb(data.Length);
    var frameStack = new ByteStack<ContainerFrame>(NbtOptions.DefaultMaxDepth, clearOnReturn: false);
    var readerState = new NbtReaderState(options);
    var reader = new NbtReader(data.Span, isFinalBlock: true, readerState);

    try
    {
        Parse(ref reader, ref database, ref frameStack);
        bytesConsumed = (int)reader.BytesConsumed;
    }
    catch
    {
        // Parsing failed: release the metadata buffer before propagating.
        database.Dispose();
        throw;
    }
    finally
    {
        readerState.Dispose();
        frameStack.Dispose();
    }

    return new NbtDocument(data, options, database, extraRentedBytes, isDisposable: true);
}
private static JsonDocument Parse(
    ReadOnlyMemory<byte> utf8Json,
    JsonReaderOptions readerOptions,
    byte[]? extraRentedBytes)
{
    // Build the metadata index over the UTF-8 payload; ownership of the
    // database (and extraRentedBytes, if any) transfers to the document.
    var database = new MetadataDb(utf8Json.Length);
    var rowStack = new StackRowStack(JsonDocumentOptions.DefaultMaxDepth * StackRow.Size);

    try
    {
        Parse(utf8Json.Span, readerOptions, ref database, ref rowStack);
    }
    catch
    {
        // Parsing failed: release the metadata buffer before propagating.
        database.Dispose();
        throw;
    }
    finally
    {
        rowStack.Dispose();
    }

    return new JsonDocument(utf8Json, database, extraRentedBytes);
}
//public static NbtDocument ParseValue(ref NbtReader reader)
//{
//
//}
//
//public static bool TryParseValue(ref NbtReader reader, out NbtDocument document)
//{
//}

// Walks the NBT token stream and records one metadata row per tag, tracking
// open containers (Compound/List) on an explicit stack so each container row
// can be back-patched with its total row count once it closes.
// NOTE(review): this block appears TRUNCATED — the switch, while loop, and
// method body are never closed in the visible source. Do not assume the
// remaining cases match the sibling overload without checking the full file.
private static void Parse(
    ref NbtReader reader,
    ref MetadataDb database,
    ref ByteStack<ContainerFrame> stack)
{
    int rowCount = 0;
    // Accessor gives by-ref access to rows so container rows can be patched in place.
    MetadataDb.Accessor accessor = new(ref database);

    NbtReadStatus status;
    while ((status = reader.TryRead()) == NbtReadStatus.Done)
    {
        int location = reader.TagLocation;
        NbtType type = reader.TagType;
        NbtFlags flags = reader.TagFlags;

        PeekStack:
        // Peek the innermost open container (null ref when the stack is empty).
        ref ContainerFrame frame = ref stack.TryPeek();
        if (!Unsafe.IsNullRef(ref frame))
        {
            if (frame.ListEntriesRemaining == 0)
            {
                // The current list is exhausted: pop it and patch its row count,
                // then re-peek in case the parent container also just completed.
                // NOTE(review): `frame` is read after TryPop — presumably the
                // popped slot stays valid until the next push; confirm ByteStack semantics.
                stack.TryPop();

                int totalRowCount = rowCount - frame.InitialRowCount;
                accessor.GetRow(frame.ContainerRow).RowCount = totalRowCount;
                goto PeekStack;
            }
            else if (frame.ListEntriesRemaining != -1)
            {
                // Inside a list (-1 marks a compound): consume one expected entry.
                frame.ListEntriesRemaining--;
            }
            else
            {
                // Inside a compound: count this tag as one entry (End included, excluded later).
                frame.CompoundEntryCounter++;
            }
        }

        switch (type)
        {
            case NbtType.End:
            {
                // Documents with a single End tag (no Compound root) are valid.
                if (stack.TryPop(out ContainerFrame compoundFrame))
                {
                    int totalRowCount = rowCount - compoundFrame.InitialRowCount;
                    int compoundLength = compoundFrame.CompoundEntryCounter - 1; // -1 to exclude End
                    ref DbRow row = ref accessor.GetRow(compoundFrame.ContainerRow);
                    row.RowCount = totalRowCount;
                    row.CollectionLength = compoundLength;
                }
                continue; // Continue to not increment row count
            }
public MetadataDb(MetadataDb source, bool useArrayPools)
{
    // Copy constructor: duplicate only the live portion of the source buffer.
    ByteLength = source.ByteLength;

    ReadOnlySpan<byte> sourceBytes = source._data.AsSpan(0, ByteLength);
    if (useArrayPools)
    {
        // Rent a buffer (possibly larger than needed) and copy the used bytes in.
        _data = ArrayPool<byte>.Shared.Rent(ByteLength);
        sourceBytes.CopyTo(_data);
    }
    else
    {
        // Exact-size managed allocation; nothing to return to a pool later.
        _data = sourceBytes.ToArray();
    }
}
internal MetadataDb(MetadataDb source, bool useArrayPools)
{
    // Copy constructor: duplicate only the live portion of the source buffer.
    Length = source.Length;
#if DEBUG
    // Non-pooled copies are treated as locked (immutable) in debug builds.
    _isLocked = !useArrayPools;
#endif

    ReadOnlySpan<byte> sourceBytes = source._data.AsSpan(0, Length);
    if (useArrayPools)
    {
        // Rent a buffer (possibly larger than needed) and copy the used bytes in.
        _data = ArrayPool<byte>.Shared.Rent(Length);
        sourceBytes.CopyTo(_data);
    }
    else
    {
        // Exact-size managed allocation; nothing to return to a pool later.
        _data = sourceBytes.ToArray();
    }
}
private static JsonDocument ParseUnrented(
    ReadOnlyMemory<byte> utf8Json,
    JsonReaderOptions readerOptions,
    JsonTokenType tokenType = JsonTokenType.None)
{
    // These tokens should already have been processed.
    Debug.Assert(
        tokenType != JsonTokenType.Null &&
        tokenType != JsonTokenType.False &&
        tokenType != JsonTokenType.True);

    ReadOnlySpan<byte> utf8JsonSpan = utf8Json.Span;
    MetadataDb database;

    if (tokenType == JsonTokenType.String || tokenType == JsonTokenType.Number)
    {
        // For primitive types, we can avoid renting MetadataDb and creating StackRowStack.
        database = MetadataDb.CreateLocked(utf8Json.Length);
        StackRowStack stack = default;
        Parse(utf8JsonSpan, readerOptions, ref database, ref stack);
    }
    else
    {
        database = MetadataDb.CreateRented(utf8Json.Length, convertToAlloc: true);
        var stack = new StackRowStack(JsonDocumentOptions.DefaultMaxDepth * StackRow.Size);
        try
        {
            Parse(utf8JsonSpan, readerOptions, ref database, ref stack);
        }
        catch
        {
            // FIX: return the rented metadata buffer to the pool on parse failure,
            // matching the other Parse overloads; previously it was leaked on throw.
            database.Dispose();
            throw;
        }
        finally
        {
            stack.Dispose();
        }
    }

    return new JsonDocument(utf8Json, database);
}
//public static NbtDocument ParseValue(ref NbtReader reader)
//{
//
//}
//
//public static bool TryParseValue(ref NbtReader reader, out NbtDocument document)
//{
//}

// Walks the NBT token stream and appends one metadata row per tag. Open
// containers (Compound/List) are tracked on an explicit stack so that each
// container's row can be back-patched with its total row count (and, for
// compounds, its entry count) once the container closes.
private static void Parse(
    ref NbtReader reader,
    ref MetadataDb database,
    ref ByteStack<ContainerFrame> stack)
{
    // Number of metadata rows emitted so far; used to compute per-container row spans.
    int rowCount = 0;

    while (reader.Read())
    {
        int location = reader.TagLocation;
        NbtType type = reader.TagType;
        NbtFlags flags = reader.TagFlags;

        PeekStack:
        // Peek the innermost open container (null ref when the stack is empty).
        ref ContainerFrame frame = ref stack.TryPeek();
        if (!Unsafe.IsNullRef(ref frame))
        {
            if (frame.ListEntriesRemaining == 0)
            {
                // The current list is exhausted: pop it, patch its row count,
                // then re-peek in case the parent container also just completed.
                // NOTE(review): `frame` is read after TryPop — presumably the
                // popped slot remains valid until the next push; confirm
                // ByteStack semantics.
                stack.TryPop();

                int totalRowCount = rowCount - frame.InitialRowCount;
                database.SetRowCount(frame.ContainerRow, totalRowCount);
                goto PeekStack;
            }
            else if (frame.ListEntriesRemaining != -1)
            {
                // Inside a list (-1 marks a compound): consume one expected entry.
                frame.ListEntriesRemaining--;
            }
            else
            {
                // Inside a compound: count this tag as an entry (End is
                // counted here and excluded again below).
                frame.CompoundEntryCounter++;
            }
        }

        switch (type)
        {
            case NbtType.End:
            {
                // Documents with a single End tag (no Compound root) are valid.
                if (stack.TryPop(out var compoundFrame))
                {
                    // End closes the innermost compound: patch its span and entry count.
                    int totalRowCount = rowCount - compoundFrame.InitialRowCount;
                    int compoundLength = compoundFrame.CompoundEntryCounter - 1; // -1 to exclude End

                    database.SetRowCount(compoundFrame.ContainerRow, totalRowCount);
                    database.SetLength(compoundFrame.ContainerRow, compoundLength);
                }
                continue; // Continue to not increment row count
            }

            case NbtType.Compound:
            {
                // Entry count unknown until the matching End tag; rowCount: 1 is
                // a placeholder patched when the compound closes.
                int containerRow = database.Append(
                    location, collectionLength: 0, rowCount: 1, type, flags);

                // ListEntriesRemaining == -1 marks the frame as a compound.
                stack.Push(new ContainerFrame(containerRow, rowCount)
                {
                    ListEntriesRemaining = -1
                });
                break;
            }

            case NbtType.List:
            {
                // Lists declare their length up front; the row count is patched
                // once all entries have been consumed.
                int listLength = reader.TagCollectionLength;

                int containerRow = database.Append(
                    location, listLength, rowCount: 1, type, flags);

                stack.Push(new ContainerFrame(containerRow, rowCount)
                {
                    ListEntriesRemaining = listLength
                });
                break;
            }

            default:
                // Scalar or array tag: a single self-contained row.
                database.Append(location, reader.TagCollectionLength, rowCount: 1, type, flags);
                break;
        }

        rowCount++;
    }

    // Shrink the metadata buffer to the rows actually written.
    database.TrimExcess();
}
internal Accessor(ref MetadataDb database)
{
    // Wrap the caller's MetadataDb in a one-element span so the accessor can
    // operate on the original struct by reference instead of a copy.
    // NOTE(review): the captured ref must not outlive the referenced
    // MetadataDb — presumably Accessor is a ref struct confined to the
    // parse loop's stack frame; confirm.
    _database = MemoryMarshal.CreateSpan(ref database, 1);
    // Cache a span over the database's backing buffer for row access.
    // NOTE(review): this snapshot goes stale if the database reallocates
    // (grows) after construction — verify Append cannot occur while an
    // Accessor is live, or that Accessor re-reads _database[0].
    RowData = _database[0]._data.AsSpan();
}