public Guid Deserialize(ref MessagePackReader reader, MessagePackSerializerOptions options)
{
    ReadOnlySequence<byte> segment = reader.ReadStringSequence().Value;
    if (segment.Length != 36)
    {
        throw new MessagePackSerializationException("Unexpected length of string.");
    }

    GuidBits result;
    if (segment.IsSingleSegment)
    {
        result = new GuidBits(segment.First.Span);
    }
    else
    {
        Span<byte> bytes = stackalloc byte[36];
        segment.CopyTo(bytes);
        result = new GuidBits(bytes);
    }

    return result.Value;
}
public static bool SequenceEqual(ReadOnlySequence<byte> first, ReadOnlySpan<byte> second)
{
    if (first.IsEmpty && second.IsEmpty)
    {
        return true;
    }

    if (first.Length != second.Length)
    {
        return false;
    }

    if (first.IsSingleSegment)
    {
        return first.FirstSpan.SequenceEqual(second);
    }

    Span<byte> buffer = stackalloc byte[second.Length];
    first.CopyTo(buffer);
    return buffer.SequenceEqual(second);
}
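// A minimal usage sketch, not part of the original source: it assumes the helper
// above lives on a hypothetical static class named SequenceHelpers. A byte[] is
// wrapped in a single-segment ReadOnlySequence<byte> and compared against a
// ReadOnlySpan<byte>; multi-segment sequences take the stackalloc copy path above.
using System;
using System.Buffers;

public static class SequenceEqualDemo
{
    public static void Main()
    {
        byte[] payload = { 0x01, 0x02, 0x03 };
        var sequence = new ReadOnlySequence<byte>(payload);
        ReadOnlySpan<byte> expected = stackalloc byte[] { 0x01, 0x02, 0x03 };

        // Prints "True": same length and identical contents.
        Console.WriteLine(SequenceHelpers.SequenceEqual(sequence, expected));
    }
}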
/// <summary>
/// Process the contents of a stream.
/// </summary>
/// <param name="stream">The stream to process.</param>
/// <param name="processor">Handler for the parsed lines.</param>
/// <returns>A task that completes when the stream has been processed.</returns>
public static async Task ParseStreamAsync(
    Stream stream,
    INmeaLineStreamProcessor processor)
{
    int lines = 0;
    int ticksAtStart = Environment.TickCount;
    int ticksAtLastLineCount = ticksAtStart;

    try
    {
        PipeReader reader = CreateFileReader(stream);
        byte[] splitLineBuffer = new byte[1000];

        while (true)
        {
            ReadResult result = await reader.ReadAsync().ConfigureAwait(false);
            ReadOnlySequence<byte> remainingSequence = ProcessBuffer(result);
            reader.AdvanceTo(remainingSequence.Start, remainingSequence.End);

            if (result.IsCompleted)
            {
                break;
            }
        }

        const int LineCountInterval = 100000;

        int finalTicks = Environment.TickCount;
        int totalTicks = finalTicks - ticksAtStart;

        processor.Progress(
            true,
            lines,
            totalTicks,
            lines % LineCountInterval,
            finalTicks - ticksAtLastLineCount);

        ReadOnlySequence<byte> ProcessBuffer(in ReadResult result)
        {
            ReadOnlySpan<byte> lineSpan;
            SequencePosition? position = null;
            ReadOnlySequence<byte> remainingSequence = result.Buffer;

            while ((position = remainingSequence.PositionOf((byte)'\n')
                    ?? (remainingSequence.IsEmpty || !result.IsCompleted
                        ? default(SequencePosition?)
                        : remainingSequence.End)) != null)
            {
                ReadOnlySequence<byte> line = remainingSequence.Slice(remainingSequence.Start, position.Value);

                if (line.IsSingleSegment)
                {
                    lineSpan = line.First.Span;
                }
                else
                {
                    Span<byte> reassemblySpan = splitLineBuffer;
                    line.CopyTo(reassemblySpan);
                    lineSpan = reassemblySpan.Slice(0, (int)line.Length);
                }

                if (lineSpan.Length > 0 && lineSpan[lineSpan.Length - 1] == (byte)'\r')
                {
                    lineSpan = lineSpan.Slice(0, lineSpan.Length - 1);
                }

                if (lineSpan.Length > 0)
                {
                    var parsedLine = new NmeaLineParser(lineSpan);
                    processor.OnNext(parsedLine);
                }

                remainingSequence = position.Value.Equals(remainingSequence.End)
                    ? remainingSequence.Slice(remainingSequence.End)
                    : remainingSequence.Slice(remainingSequence.GetPosition(1, position.Value));

                if (++lines % LineCountInterval == 0)
                {
                    int currentTicks = Environment.TickCount;
                    int ticksSinceLastLineCount = currentTicks - ticksAtLastLineCount;

                    processor.Progress(
                        false,
                        lines,
                        currentTicks - ticksAtStart,
                        LineCountInterval,
                        ticksSinceLastLineCount);

                    ticksAtLastLineCount = currentTicks;
                }
            }

            return remainingSequence;
        }
    }
    finally
    {
        processor.OnCompleted();
    }
}
public TFrameList Deserialize(ref MessagePackReader reader, MessagePackSerializerOptions options)
{
    if (reader.TryReadNil())
    {
        return (TFrameList)(IList<T>)null;
    }

    Interlocked.Increment(ref ParallelGatekeeperSingleton.wrapperDepth);
    try
    {
        options.Security.DepthStep(ref reader);
        try
        {
            FrameFormatterSerializationOptions frameOptions = options.GetOptionParams();
            if (frameOptions.MthWorkerConfig.MaxConcurrentTasks == 1 || ParallelGatekeeperSingleton.wrapperDepth > 1)
            {
                return DeserializeSynchronous(ref reader, options);
            }

            var readerBackup = reader.CreatePeekReader();
            int count = reader.ReadArrayHeader();
            if (count == 0)
            {
                reader = readerBackup;
                return DeserializeSynchronous(ref reader, options);
            }

            var peekreader = reader.CreatePeekReader();
            if (FrameItemFormatter<T>.ReadElementHeader(ref peekreader) == Frame<T>.unassigned)
            {
                if (frameOptions.ThrowOnUnnasignedFrameDeserialization)
                {
                    throw new StreamSerializationException($"Unassigned buffer length found during parallel deserialize for {nameof(TFrameList)}");
                }

                reader = readerBackup;
                return DeserializeSynchronous(ref reader, options);
            }

            IMessagePackFormatter<T> formatterT = options.Resolver.GetFormatterWithVerify<T>();
            ListFrameWrapper valueWrapper = GetTFrameListWrapper(count);
            Frame<T>[] resItems = valueWrapper.AsFrameArray();
            BatchSizeEstimator batchEstimator = new BatchSizeEstimator(frameOptions.BatchSizeEstimatorConfig);

            void ProcessBatch(BatchWithBufferWritersAndElementOffset batch, CancellationToken token)
            {
                try
                {
                    ReadOnlySpan<int> lengths = batch.buffers.lengths.WrittenSpan;
                    ReadOnlyMemory<byte> bodies = batch.buffers.concatenatedBodies.WrittenMemory;
                    int batchSize = batch.buffers.lengths.WrittenCount;
                    var destSpan = resItems.AsSpan(batch.offset, batchSize);

                    for (int ix = 0, bodyStartIx = 0; ix < batchSize; ix++)
                    {
                        int itemLen = lengths[ix];
                        ReadOnlyMemory<byte> body = bodies.Slice(bodyStartIx, itemLen);
                        MessagePackReader tmpReader = new MessagePackReader(body)
                        {
                            CancellationToken = token
                        };

                        destSpan[ix].BufferLength = body.Length;
                        destSpan[ix].Item = formatterT.Deserialize(ref tmpReader, options);
                        bodyStartIx += itemLen;
                    }
                }
                finally
                {
                    objPoolBufferWriterBodies.Return(batch.buffers.concatenatedBodies);
                    objPoolBufferWriterBodyLengths.Return(batch.buffers.lengths);
                }
            }

            using (var mtw = new MultiThreadedWorker<BatchWithBufferWritersAndElementOffset>(
                frameOptions.MthWorkerConfig, ProcessBatch))
            {
                int i = 0;
                while (i < count)
                {
                    int batchSize = Math.Min(count - i, batchEstimator.RecomendedBatchSize);
                    var currentBatch = new BatchWithBufferWritersAndElementOffset()
                    {
                        offset = i,
                        buffers = new BatchWithBufferWriters()
                        {
                            concatenatedBodies = objPoolBufferWriterBodies.Get(),
                            lengths = objPoolBufferWriterBodyLengths.Get()
                        }
                    };

                    for (int seqIx = 0; seqIx < batchSize; seqIx++)
                    {
                        int itemLength = FrameItemFormatter<T>.ReadElementHeader(ref reader);
                        if (itemLength == Frame<T>.unassigned)
                        {
                            throw new StreamSerializationException($"Unassigned buffer length found during parallel deserialize for {nameof(TFrameList)}");
                        }

                        currentBatch.buffers.lengths.GetSpan(1)[0] = itemLength;
                        currentBatch.buffers.lengths.Advance(1);

                        ReadOnlySequence<byte> raw = reader.ReadRaw(itemLength);
                        raw.CopyTo(currentBatch.buffers.concatenatedBodies.GetSpan(itemLength));
                        currentBatch.buffers.concatenatedBodies.Advance(itemLength);
                        batchEstimator.UpdateEstimate(itemLength);
                    }

                    mtw.AddWorkItem(currentBatch, reader.CancellationToken);
                    i += batchSize;
                }
            }

            return valueWrapper.AsFrameList();
        }
        finally
        {
            reader.Depth--;
        }
    }
    finally
    {
        Interlocked.Decrement(ref ParallelGatekeeperSingleton.wrapperDepth);
    }
}
private async Task ProcessInternal(PipeReader reader)
{
    ReadResult result = await reader.ReadAsync().ConfigureAwait(false);
    ReadOnlySequence<byte> buffer = result.Buffer;

    if (OnReceive == null)
    {
        // Tell the PipeReader how much of the buffer we have consumed
        reader.AdvanceTo(buffer.End);
        return;
    }

    WebSocketMessage<object> stub;
    try
    {
        if (buffer.IsSingleSegment)
        {
            stub = JsonSerializer.Deserialize<WebSocketMessage<object>>(buffer.FirstSpan, _jsonOptions);
        }
        else
        {
            var buf = ArrayPool<byte>.Shared.Rent(Convert.ToInt32(buffer.Length));
            try
            {
                buffer.CopyTo(buf);

                // Parse only the bytes that were copied; the rented array may be larger
                // than the message and contain stale data past buffer.Length.
                stub = JsonSerializer.Deserialize<WebSocketMessage<object>>(
                    buf.AsSpan(0, (int)buffer.Length), _jsonOptions);
            }
            finally
            {
                ArrayPool<byte>.Shared.Return(buf);
            }
        }
    }
    catch (JsonException ex)
    {
        // Tell the PipeReader how much of the buffer we have consumed
        reader.AdvanceTo(buffer.End);
        _logger.LogError(ex, "Error processing web socket message");
        return;
    }

    // Tell the PipeReader how much of the buffer we have consumed
    reader.AdvanceTo(buffer.End);

    _logger.LogDebug("WS {IP} received message: {@Message}", RemoteEndPoint, stub);

    var info = new WebSocketMessageInfo
    {
        MessageType = stub.MessageType,
        Data = stub.Data?.ToString(), // Data can be null
        Connection = this
    };

    if (info.MessageType.Equals("KeepAlive", StringComparison.Ordinal))
    {
        await SendKeepAliveResponse();
    }
    else
    {
        await OnReceive(info).ConfigureAwait(false);
    }
}
private static void ReadValueCore(JsonSerializerOptions options, ref Utf8JsonReader reader, ref ReadStack readStack)
{
    JsonReaderState state = reader.CurrentState;
    CheckSupportedOptions(state.Options, nameof(reader));

    // Value copy to overwrite the ref on an exception and undo the destructive reads.
    Utf8JsonReader restore = reader;

    ReadOnlySpan<byte> valueSpan = default;
    ReadOnlySequence<byte> valueSequence = default;

    try
    {
        switch (reader.TokenType)
        {
            // A new reader was created and has never been read,
            // so we need to move to the first token.
            // (or a reader has terminated and we're about to throw)
            case JsonTokenType.None:

            // Using a reader loop the caller has identified a property they wish to
            // hydrate into a JsonDocument. Move to the value first.
            case JsonTokenType.PropertyName:
            {
                if (!reader.Read())
                {
                    ThrowHelper.ThrowJsonReaderException(ref reader, ExceptionResource.ExpectedOneCompleteToken);
                }

                break;
            }
        }

        switch (reader.TokenType)
        {
            // Any of the "value start" states are acceptable.
            case JsonTokenType.StartObject:
            case JsonTokenType.StartArray:
            {
                long startingOffset = reader.TokenStartIndex;

                if (!reader.TrySkip())
                {
                    ThrowHelper.ThrowJsonReaderException(ref reader, ExceptionResource.NotEnoughData);
                }

                long totalLength = reader.BytesConsumed - startingOffset;
                ReadOnlySequence<byte> sequence = reader.OriginalSequence;

                if (sequence.IsEmpty)
                {
                    valueSpan = reader.OriginalSpan.Slice(
                        checked((int)startingOffset),
                        checked((int)totalLength));
                }
                else
                {
                    valueSequence = sequence.Slice(startingOffset, totalLength);
                }

                Debug.Assert(
                    reader.TokenType == JsonTokenType.EndObject ||
                    reader.TokenType == JsonTokenType.EndArray);

                break;
            }

            // Single-token values
            case JsonTokenType.Number:
            case JsonTokenType.True:
            case JsonTokenType.False:
            case JsonTokenType.Null:
            {
                if (reader.HasValueSequence)
                {
                    valueSequence = reader.ValueSequence;
                }
                else
                {
                    valueSpan = reader.ValueSpan;
                }

                break;
            }

            // String's ValueSequence/ValueSpan omits the quotes, we need them back.
            case JsonTokenType.String:
            {
                ReadOnlySequence<byte> sequence = reader.OriginalSequence;

                if (sequence.IsEmpty)
                {
                    // Since the quoted string fit in a ReadOnlySpan originally
                    // the contents length plus the two quotes can't overflow.
                    int payloadLength = reader.ValueSpan.Length + 2;
                    Debug.Assert(payloadLength > 1);

                    ReadOnlySpan<byte> readerSpan = reader.OriginalSpan;

                    Debug.Assert(
                        readerSpan[(int)reader.TokenStartIndex] == (byte)'"',
                        $"Calculated span starts with {readerSpan[(int)reader.TokenStartIndex]}");

                    Debug.Assert(
                        readerSpan[(int)reader.TokenStartIndex + payloadLength - 1] == (byte)'"',
                        $"Calculated span ends with {readerSpan[(int)reader.TokenStartIndex + payloadLength - 1]}");

                    valueSpan = readerSpan.Slice((int)reader.TokenStartIndex, payloadLength);
                }
                else
                {
                    long payloadLength = 2;

                    if (reader.HasValueSequence)
                    {
                        payloadLength += reader.ValueSequence.Length;
                    }
                    else
                    {
                        payloadLength += reader.ValueSpan.Length;
                    }

                    valueSequence = sequence.Slice(reader.TokenStartIndex, payloadLength);

                    Debug.Assert(
                        valueSequence.First.Span[0] == (byte)'"',
                        $"Calculated sequence starts with {valueSequence.First.Span[0]}");

                    Debug.Assert(
                        valueSequence.ToArray()[payloadLength - 1] == (byte)'"',
                        $"Calculated sequence ends with {valueSequence.ToArray()[payloadLength - 1]}");
                }

                break;
            }

            default:
            {
                byte displayByte;

                if (reader.HasValueSequence)
                {
                    displayByte = reader.ValueSequence.First.Span[0];
                }
                else
                {
                    displayByte = reader.ValueSpan[0];
                }

                ThrowHelper.ThrowJsonReaderException(
                    ref reader,
                    ExceptionResource.ExpectedStartOfValueNotFound,
                    displayByte);

                break;
            }
        }
    }
    catch (JsonReaderException ex)
    {
        reader = restore;

        // Re-throw with Path information.
        ThrowHelper.ReThrowWithPath(readStack, ex);
    }

    int length = valueSpan.IsEmpty ? checked((int)valueSequence.Length) : valueSpan.Length;
    byte[] rented = ArrayPool<byte>.Shared.Rent(length);
    Span<byte> rentedSpan = rented.AsSpan(0, length);

    try
    {
        if (valueSpan.IsEmpty)
        {
            valueSequence.CopyTo(rentedSpan);
        }
        else
        {
            valueSpan.CopyTo(rentedSpan);
        }

        var newReader = new Utf8JsonReader(rentedSpan, isFinalBlock: true, state: default);

        ReadCore(options, ref newReader, ref readStack);

        // The reader should have thrown if we have remaining bytes.
        Debug.Assert(newReader.BytesConsumed == length);
    }
    catch (JsonException)
    {
        reader = restore;
        throw;
    }
    finally
    {
        rentedSpan.Clear();
        ArrayPool<byte>.Shared.Return(rented);
    }
}
public IKzWriter Add(ReadOnlySequence<byte> data)
{
    data.CopyTo(Memory.Span.Slice(Length));
    Length += (int)data.Length;
    return this;
}
public int Input(ReadOnlySequence<byte> span)
{
    byte[] buffer = new byte[span.Length];
    span.CopyTo(buffer);
    return Input(buffer);
}
private static bool TryParseValue(ref Utf8JsonReader reader, out JsonDocument document, bool shouldThrow)
{
    JsonReaderState state = reader.CurrentState;
    CheckSupportedOptions(state.Options, nameof(reader));

    // Value copy to overwrite the ref on an exception and undo the destructive reads.
    Utf8JsonReader restore = reader;

    // Only used for StartArray or StartObject,
    // the beginning of the token is one byte earlier.
    long startingOffset = state.BytesConsumed;

    ReadOnlySpan<byte> valueSpan = default;
    ReadOnlySequence<byte> valueSequence = default;

    try
    {
        switch (reader.TokenType)
        {
            // A new reader was created and has never been read,
            // so we need to move to the first token.
            // (or a reader has terminated and we're about to throw)
            case JsonTokenType.None:

            // Using a reader loop the caller has identified a property they wish to
            // hydrate into a JsonDocument. Move to the value first.
            case JsonTokenType.PropertyName:
            {
                if (!reader.Read())
                {
                    if (shouldThrow)
                    {
                        ThrowHelper.ThrowJsonReaderException(
                            ref reader,
                            ExceptionResource.ExpectedJsonTokens);
                    }

                    reader = restore;
                    document = null;
                    return false;
                }

                // Reset the starting position since we moved.
                startingOffset = reader.BytesConsumed;
                break;
            }
        }

        switch (reader.TokenType)
        {
            // Any of the "value start" states are acceptable.
            case JsonTokenType.StartObject:
            case JsonTokenType.StartArray:
            {
                // Placeholder until reader.Skip() is written (#33295)
                {
                    int depth = reader.CurrentDepth;

                    // CurrentDepth rises late and falls fast,
                    // a payload of "[ 1, 2, 3, 4 ]" will report post-Read()
                    // CurrentDepth values of { 0, 1, 1, 1, 1, 0 },
                    // Since we're logically at 0 ([), Read() once and keep
                    // reading until we've come back down to 0 (]).
                    do
                    {
                        if (!reader.Read())
                        {
                            if (shouldThrow)
                            {
                                ThrowHelper.ThrowJsonReaderException(
                                    ref reader,
                                    ExceptionResource.ExpectedJsonTokens);
                            }

                            reader = restore;
                            document = null;
                            return false;
                        }
                    } while (reader.CurrentDepth > depth);
                }

                // Back up to be at the beginning of the { or [, vs the end.
                startingOffset--;

                long totalLength = reader.BytesConsumed - startingOffset;
                ReadOnlySequence<byte> sequence = reader.OriginalSequence;

                if (sequence.IsEmpty)
                {
                    valueSpan = reader.OriginalSpan.Slice(
                        checked((int)startingOffset),
                        checked((int)totalLength));
                }
                else
                {
                    valueSequence = sequence.Slice(startingOffset, totalLength);
                }

                Debug.Assert(
                    reader.TokenType == JsonTokenType.EndObject ||
                    reader.TokenType == JsonTokenType.EndArray);

                break;
            }

            // Single-token values
            case JsonTokenType.Number:
            case JsonTokenType.True:
            case JsonTokenType.False:
            case JsonTokenType.Null:
            {
                if (reader.HasValueSequence)
                {
                    valueSequence = reader.ValueSequence;
                }
                else
                {
                    valueSpan = reader.ValueSpan;
                }

                break;
            }

            // String's ValueSequence/ValueSpan omits the quotes, we need them back.
            case JsonTokenType.String:
            {
                ReadOnlySequence<byte> sequence = reader.OriginalSequence;

                if (sequence.IsEmpty)
                {
                    // Since the quoted string fit in a ReadOnlySpan originally
                    // the contents length plus the two quotes can't overflow.
                    int payloadLength = reader.ValueSpan.Length + 2;
                    Debug.Assert(payloadLength > 1);

                    int openQuote = checked((int)startingOffset) - payloadLength;
                    ReadOnlySpan<byte> readerSpan = reader.OriginalSpan;

                    Debug.Assert(
                        readerSpan[openQuote] == (byte)'"',
                        $"Calculated span starts with {readerSpan[openQuote]}");

                    Debug.Assert(
                        readerSpan[(int)startingOffset - 1] == (byte)'"',
                        $"Calculated span ends with {readerSpan[(int)startingOffset - 1]}");

                    valueSpan = readerSpan.Slice(openQuote, payloadLength);
                }
                else
                {
                    long payloadLength = 2;

                    if (reader.HasValueSequence)
                    {
                        payloadLength += reader.ValueSequence.Length;
                    }
                    else
                    {
                        payloadLength += reader.ValueSpan.Length;
                    }

                    valueSequence = sequence.Slice(startingOffset - payloadLength, payloadLength);

                    Debug.Assert(
                        valueSequence.First.Span[0] == (byte)'"',
                        $"Calculated sequence starts with {valueSequence.First.Span[0]}");
                }

                break;
            }

            default:
            {
                if (shouldThrow)
                {
                    byte displayByte;

                    if (reader.HasValueSequence)
                    {
                        displayByte = reader.ValueSequence.First.Span[0];
                    }
                    else
                    {
                        displayByte = reader.ValueSpan[0];
                    }

                    ThrowHelper.ThrowJsonReaderException(
                        ref reader,
                        ExceptionResource.ExpectedStartOfValueNotFound,
                        displayByte);
                }

                reader = restore;
                document = null;
                return false;
            }
        }
    }
    catch
    {
        reader = restore;
        throw;
    }

    int length = valueSpan.IsEmpty ? checked((int)valueSequence.Length) : valueSpan.Length;
    byte[] rented = ArrayPool<byte>.Shared.Rent(length);
    Span<byte> rentedSpan = rented.AsSpan(0, length);

    try
    {
        if (valueSpan.IsEmpty)
        {
            valueSequence.CopyTo(rentedSpan);
        }
        else
        {
            valueSpan.CopyTo(rentedSpan);
        }

        document = Parse(rented.AsMemory(0, length), state.Options, rented);
        return true;
    }
    catch
    {
        // This really shouldn't happen since the document was already checked
        // for consistency by Skip. But if data mutations happened just after
        // the calls to Read then the copy may not be valid.
        rentedSpan.Clear();
        ArrayPool<byte>.Shared.Return(rented);
        throw;
    }
}
protected override int CopyTo(ReadOnlySequence<byte> source, Span<byte> target)
{
    source.CopyTo(target);
    return target.Length;
}
private static void InitializeString(Span<char> output, ReadOnlySequence<char> input) =>
    input.CopyTo(output);
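// A minimal sketch, not from the original source: it assumes the one-line helper above
// is used as a string.Create callback, since its signature matches
// SpanAction<char, ReadOnlySequence<char>>. This materializes a (possibly multi-segment)
// ReadOnlySequence<char> into a string without an intermediate char[] allocation.
using System;
using System.Buffers;

public static class StringFromSequenceDemo
{
    private static void InitializeString(Span<char> output, ReadOnlySequence<char> input) =>
        input.CopyTo(output);

    public static string ToStringFromSequence(ReadOnlySequence<char> input) =>
        // string.Create allocates the string once, then lets the callback fill it in place.
        string.Create((int)input.Length, input, InitializeString);
}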
private void InternalDequeue(ReadOnlySequence<byte> sequence, Action<ReadOnlySequence<byte>> action)
{
    using var hub = new Hub();

    try
    {
        if (_version.HasFlag(OmniSecureConnectionVersion.Version1) && _infoV1 != null)
        {
            if (_infoV1.CryptoAlgorithm.HasFlag(V1.Internal.CryptoAlgorithm.Aes_256)
                && _infoV1.HashAlgorithm.HasFlag(V1.Internal.HashAlgorithm.Sha2_256))
            {
                const int headerSize = 8;
                const int hashLength = 32;
                const int blockSize = 16;

                Interlocked.Add(ref _totalReceivedSize, sequence.Length - (headerSize + hashLength));

                // Verify that the cumulative size of the data received so far is correct.
                {
                    long totalReceivedSize;
                    {
                        Span<byte> totalReceiveSizeBuffer = stackalloc byte[headerSize];
                        sequence.Slice(0, headerSize).CopyTo(totalReceiveSizeBuffer);
                        totalReceivedSize = (long)BinaryPrimitives.ReadUInt64BigEndian(totalReceiveSizeBuffer);
                    }

                    if (totalReceivedSize != _totalReceivedSize)
                    {
                        throw new OmniSecureConnectionException();
                    }
                }

                // Verify the HMAC.
                {
                    Span<byte> receivedHash = stackalloc byte[hashLength];
                    sequence.Slice(sequence.Length - hashLength).CopyTo(receivedHash);

                    var computedhash = Hmac_Sha2_256.ComputeHash(
                        sequence.Slice(headerSize, sequence.Length - (headerSize + hashLength)),
                        _infoV1.OtherHmacKey);

                    if (!BytesOperations.SequenceEqual(receivedHash, computedhash))
                    {
                        throw new OmniSecureConnectionException();
                    }
                }

                sequence = sequence.Slice(headerSize, sequence.Length - (headerSize + hashLength));

                using (var aes = Aes.Create())
                {
                    aes.KeySize = 256;
                    aes.Mode = CipherMode.CBC;
                    aes.Padding = PaddingMode.PKCS7;

                    // Read the IV.
                    var iv = new byte[16];
                    sequence.Slice(0, iv.Length).CopyTo(iv);
                    sequence = sequence.Slice(iv.Length);

                    // Decrypt the encrypted payload.
                    using (var decryptor = aes.CreateDecryptor(_infoV1.OtherCryptoKey, iv))
                    {
                        var inBuffer = _bufferPool.GetArrayPool().Rent(blockSize);
                        var outBuffer = _bufferPool.GetArrayPool().Rent(blockSize);

                        try
                        {
                            while (sequence.Length > blockSize)
                            {
                                sequence.Slice(0, blockSize).CopyTo(inBuffer.AsSpan(0, blockSize));
                                var transed = decryptor.TransformBlock(inBuffer, 0, blockSize, outBuffer, 0);
                                hub.Writer.Write(outBuffer.AsSpan(0, transed));
                                sequence = sequence.Slice(blockSize);
                            }

                            {
                                int remain = (int)sequence.Length;
                                sequence.CopyTo(inBuffer.AsSpan(0, remain));

                                var remainBuffer = decryptor.TransformFinalBlock(inBuffer, 0, remain);
                                hub.Writer.Write(remainBuffer);
                                hub.Writer.Complete();
                            }
                        }
                        finally
                        {
                            _bufferPool.GetArrayPool().Return(inBuffer);
                            _bufferPool.GetArrayPool().Return(outBuffer);
                        }
                    }
                }

                action.Invoke(hub.Reader.GetSequence());
                hub.Reader.Complete();

                return;
            }
        }
    }
    catch (OmniSecureConnectionException)
    {
        // Rethrow without disturbing the original stack trace.
        throw;
    }
    catch (Exception e)
    {
        throw new OmniSecureConnectionException(e.Message, e);
    }

    throw new OmniSecureConnectionException("Conversion failed.");
}
public async override ValueTask<ReadResult> ReadAsync(CancellationToken cancellationToken = default)
{
    var innerResult = await _inner.ReadAsync(cancellationToken);
    if (innerResult.Buffer.IsEmpty)
    {
        _currentDecodedBuffer = innerResult.Buffer;
        _currentInnerBuffer = innerResult.Buffer;
        return innerResult;
    }

    // Minimum valid base64 length is 4. Read until we have at least that much content.
    while (innerResult.Buffer.Length < 4)
    {
        if (innerResult.IsCompleted)
        {
            // If the reader completes with less than 4 bytes then the base64 isn't valid.
            throw new InvalidOperationException("Unexpected end of data when reading base64 content.");
        }

        if (innerResult.IsCanceled)
        {
            // Cancelled before we have enough data to decode. Return a cancelled result with no data.
            _currentDecodedBuffer = ReadOnlySequence<byte>.Empty;
            _currentInnerBuffer = innerResult.Buffer;
            return new ReadResult(
                ReadOnlySequence<byte>.Empty,
                innerResult.IsCanceled,
                innerResult.IsCompleted);
        }

        // Attempt to get more data
        _inner.AdvanceTo(innerResult.Buffer.Start, innerResult.Buffer.End);
        innerResult = await _inner.ReadAsync(cancellationToken);
    }

    // Limit result to complete base64 segments (multiples of 4)
    var buffer = innerResult.Buffer.Slice(0, (innerResult.Buffer.Length / 4) * 4);

    // The content can contain multiple fragments of base64 content.
    // Check for padding, and limit returned data to one fragment at a time.
    var paddingIndex = PositionOf(buffer, (byte)'=');
    if (paddingIndex != null)
    {
        _currentInnerBuffer = buffer.Slice(0, ((paddingIndex.Value / 4) + 1) * 4);
    }
    else
    {
        _currentInnerBuffer = buffer;
    }

    var length = (int)_currentInnerBuffer.Length;

    // Any rented buffer should have been returned
    Debug.Assert(_rentedBuffer == null);
    _rentedBuffer = ArrayPool<byte>.Shared.Rent(length);

    _currentInnerBuffer.CopyTo(_rentedBuffer);

    var validLength = (length / 4) * 4;
    var status = Base64.DecodeFromUtf8InPlace(_rentedBuffer.AsSpan(0, validLength), out var bytesWritten);
    if (status == OperationStatus.Done || status == OperationStatus.NeedMoreData)
    {
        _currentDecodedBuffer = new ReadOnlySequence<byte>(_rentedBuffer, 0, bytesWritten);
        return new ReadResult(
            _currentDecodedBuffer,
            innerResult.IsCanceled,
            innerResult.IsCompleted);
    }

    throw new InvalidOperationException("Unexpected status: " + status);
}
/// <summary>
/// Copies bytes directly into the message pack writer.
/// </summary>
/// <param name="rawMessagePackBlock">The sequence of bytes to copy from.</param>
public void WriteRaw(ReadOnlySequence<byte> rawMessagePackBlock) => rawMessagePackBlock.CopyTo(ref writer);
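// A hedged caller-side sketch, not from the original source: it assumes the
// MessagePack-CSharp public API (MessagePackWriter over an IBufferWriter<byte>).
// A block of already-encoded MessagePack bytes is copied verbatim into the output
// via WriteRaw, without being re-encoded.
using System.Buffers;
using MessagePack;

public static class WriteRawDemo
{
    public static byte[] EmbedPreEncoded(ReadOnlySequence<byte> preEncodedBlock)
    {
        var output = new ArrayBufferWriter<byte>();
        var writer = new MessagePackWriter(output);

        writer.WriteRaw(preEncodedBlock); // copy the raw bytes as-is
        writer.Flush();                   // push buffered bytes to the IBufferWriter

        return output.WrittenSpan.ToArray();
    }
}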
public int Send(ReadOnlySequence<byte> span, object option = null)
{
    byte[] buffer = new byte[span.Length];
    span.CopyTo(buffer);
    return Send(buffer);
}
internal static bool TryParseValue(
    ref Utf8JsonReader reader,
    [NotNullWhen(true)] out JsonDocument? document,
    bool shouldThrow,
    bool useArrayPools)
{
    JsonReaderState state = reader.CurrentState;
    CheckSupportedOptions(state.Options, nameof(reader));

    // Value copy to overwrite the ref on an exception and undo the destructive reads.
    Utf8JsonReader restore = reader;

    ReadOnlySpan<byte> valueSpan = default;
    ReadOnlySequence<byte> valueSequence = default;

    try
    {
        switch (reader.TokenType)
        {
            // A new reader was created and has never been read,
            // so we need to move to the first token.
            // (or a reader has terminated and we're about to throw)
            case JsonTokenType.None:

            // Using a reader loop the caller has identified a property they wish to
            // hydrate into a JsonDocument. Move to the value first.
            case JsonTokenType.PropertyName:
            {
                if (!reader.Read())
                {
                    if (shouldThrow)
                    {
                        ThrowHelper.ThrowJsonReaderException(
                            ref reader,
                            ExceptionResource.ExpectedJsonTokens);
                    }

                    reader = restore;
                    document = null;
                    return false;
                }

                break;
            }
        }

        switch (reader.TokenType)
        {
            // Any of the "value start" states are acceptable.
            case JsonTokenType.StartObject:
            case JsonTokenType.StartArray:
            {
                long startingOffset = reader.TokenStartIndex;

                if (!reader.TrySkip())
                {
                    if (shouldThrow)
                    {
                        ThrowHelper.ThrowJsonReaderException(
                            ref reader,
                            ExceptionResource.ExpectedJsonTokens);
                    }

                    reader = restore;
                    document = null;
                    return false;
                }

                long totalLength = reader.BytesConsumed - startingOffset;
                ReadOnlySequence<byte> sequence = reader.OriginalSequence;

                if (sequence.IsEmpty)
                {
                    valueSpan = reader.OriginalSpan.Slice(
                        checked((int)startingOffset),
                        checked((int)totalLength));
                }
                else
                {
                    valueSequence = sequence.Slice(startingOffset, totalLength);
                }

                Debug.Assert(
                    reader.TokenType == JsonTokenType.EndObject ||
                    reader.TokenType == JsonTokenType.EndArray);

                break;
            }

            case JsonTokenType.False:
            case JsonTokenType.True:
            case JsonTokenType.Null:
                if (useArrayPools)
                {
                    if (reader.HasValueSequence)
                    {
                        valueSequence = reader.ValueSequence;
                    }
                    else
                    {
                        valueSpan = reader.ValueSpan;
                    }

                    break;
                }

                document = CreateForLiteral(reader.TokenType);
                return true;

            case JsonTokenType.Number:
            {
                if (reader.HasValueSequence)
                {
                    valueSequence = reader.ValueSequence;
                }
                else
                {
                    valueSpan = reader.ValueSpan;
                }

                break;
            }

            // String's ValueSequence/ValueSpan omits the quotes, we need them back.
            case JsonTokenType.String:
            {
                ReadOnlySequence<byte> sequence = reader.OriginalSequence;

                if (sequence.IsEmpty)
                {
                    // Since the quoted string fit in a ReadOnlySpan originally
                    // the contents length plus the two quotes can't overflow.
                    int payloadLength = reader.ValueSpan.Length + 2;
                    Debug.Assert(payloadLength > 1);

                    ReadOnlySpan<byte> readerSpan = reader.OriginalSpan;

                    Debug.Assert(
                        readerSpan[(int)reader.TokenStartIndex] == (byte)'"',
                        $"Calculated span starts with {readerSpan[(int)reader.TokenStartIndex]}");

                    Debug.Assert(
                        readerSpan[(int)reader.TokenStartIndex + payloadLength - 1] == (byte)'"',
                        $"Calculated span ends with {readerSpan[(int)reader.TokenStartIndex + payloadLength - 1]}");

                    valueSpan = readerSpan.Slice((int)reader.TokenStartIndex, payloadLength);
                }
                else
                {
                    long payloadLength = 2;

                    if (reader.HasValueSequence)
                    {
                        payloadLength += reader.ValueSequence.Length;
                    }
                    else
                    {
                        payloadLength += reader.ValueSpan.Length;
                    }

                    valueSequence = sequence.Slice(reader.TokenStartIndex, payloadLength);

                    Debug.Assert(
                        valueSequence.First.Span[0] == (byte)'"',
                        $"Calculated sequence starts with {valueSequence.First.Span[0]}");

                    Debug.Assert(
                        valueSequence.ToArray()[payloadLength - 1] == (byte)'"',
                        $"Calculated sequence ends with {valueSequence.ToArray()[payloadLength - 1]}");
                }

                break;
            }

            default:
            {
                if (shouldThrow)
                {
                    // Default case would only hit if TokenType equals JsonTokenType.EndObject or JsonTokenType.EndArray in which case it would never be sequence
                    Debug.Assert(!reader.HasValueSequence);
                    byte displayByte = reader.ValueSpan[0];

                    ThrowHelper.ThrowJsonReaderException(
                        ref reader,
                        ExceptionResource.ExpectedStartOfValueNotFound,
                        displayByte);
                }

                reader = restore;
                document = null;
                return false;
            }
        }
    }
    catch
    {
        reader = restore;
        throw;
    }

    int length = valueSpan.IsEmpty ? checked((int)valueSequence.Length) : valueSpan.Length;

    if (useArrayPools)
    {
        byte[] rented = ArrayPool<byte>.Shared.Rent(length);
        Span<byte> rentedSpan = rented.AsSpan(0, length);

        try
        {
            if (valueSpan.IsEmpty)
            {
                valueSequence.CopyTo(rentedSpan);
            }
            else
            {
                valueSpan.CopyTo(rentedSpan);
            }

            document = Parse(rented.AsMemory(0, length), state.Options, rented);
        }
        catch
        {
            // This really shouldn't happen since the document was already checked
            // for consistency by Skip. But if data mutations happened just after
            // the calls to Read then the copy may not be valid.
            rentedSpan.Clear();
            ArrayPool<byte>.Shared.Return(rented);
            throw;
        }
    }
    else
    {
        byte[] owned;

        if (valueSpan.IsEmpty)
        {
            owned = valueSequence.ToArray();
        }
        else
        {
            owned = valueSpan.ToArray();
        }

        document = ParseUnrented(owned, state.Options, reader.TokenType);
    }

    return true;
}
private void CopyToBuffer(ReadOnlySequence<byte> sequence) => sequence.CopyTo(_buffer.AsSpan());
public override TimeSpan Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
    if (reader.TokenType != JsonTokenType.String)
    {
        throw ThrowHelper.GetInvalidOperationException_ExpectedString(reader.TokenType);
    }

    bool isEscaped = reader._stringHasEscaping;
    int maximumLength = isEscaped ? MaximumEscapedTimeSpanFormatLength : MaximumTimeSpanFormatLength;

    ReadOnlySpan<byte> source = stackalloc byte[0];

    if (reader.HasValueSequence)
    {
        ReadOnlySequence<byte> valueSequence = reader.ValueSequence;
        long sequenceLength = valueSequence.Length;

        if (!JsonHelpers.IsInRangeInclusive(sequenceLength, MinimumTimeSpanFormatLength, maximumLength))
        {
            throw ThrowHelper.GetFormatException(DataType.TimeSpan);
        }

        Span<byte> stackSpan = stackalloc byte[isEscaped ? MaximumEscapedTimeSpanFormatLength : MaximumTimeSpanFormatLength];
        valueSequence.CopyTo(stackSpan);
        source = stackSpan.Slice(0, (int)sequenceLength);
    }
    else
    {
        source = reader.ValueSpan;

        if (!JsonHelpers.IsInRangeInclusive(source.Length, MinimumTimeSpanFormatLength, maximumLength))
        {
            throw ThrowHelper.GetFormatException(DataType.TimeSpan);
        }
    }

    if (isEscaped)
    {
        int backslash = source.IndexOf(JsonConstants.BackSlash);
        Debug.Assert(backslash != -1);

        Span<byte> sourceUnescaped = stackalloc byte[MaximumEscapedTimeSpanFormatLength];

        JsonReaderHelper.Unescape(source, sourceUnescaped, backslash, out int written);
        Debug.Assert(written > 0);

        source = sourceUnescaped.Slice(0, written);
        Debug.Assert(!source.IsEmpty);
    }

    byte firstChar = source[0];
    if (!JsonHelpers.IsDigit(firstChar) && firstChar != '-')
    {
        // Note: Utf8Parser.TryParse allows for leading whitespace so we
        // need to exclude that case here.
        throw ThrowHelper.GetFormatException(DataType.TimeSpan);
    }

    bool result = Utf8Parser.TryParse(source, out TimeSpan tmpValue, out int bytesConsumed, 'c');

    // Note: Utf8Parser.TryParse will return true for invalid input so
    // long as it starts with an integer. Example: "2021-06-18" or
    // "1$$$$$$$$$$". We need to check bytesConsumed to know if the
    // entire source was actually valid.
    if (result && source.Length == bytesConsumed)
    {
        return tmpValue;
    }

    throw ThrowHelper.GetFormatException(DataType.TimeSpan);
}