public Frame<T> Deserialize(ref MessagePackReader reader, MessagePackSerializerOptions options)
{
    FrameFormatterSerializationOptions frameOptions = options.GetOptionParams();
    var formatter = options.Resolver.GetFormatterWithVerify<T>();
    return Deserialize(ref reader, options, frameOptions, formatter);
}
private TFrameList DeserializeSynchronous(ref MessagePackReader reader, MessagePackSerializerOptions options)
{
    int count = reader.ReadArrayHeader();
    ListFrameWrapper valueWrapper = GetTFrameListWrapper(count);
    if (count > 0)
    {
        FrameFormatterSerializationOptions frameOptions = options.GetOptionParams();
        Frame<T>[] resItems = valueWrapper.AsFrameArray();
        var formatter = options.Resolver.GetFormatterWithVerify<T>();
        for (int i = 0; i < count; i++)
        {
            resItems[i] = FrameItemFormatter<T>.Deserialize(ref reader, options, frameOptions, formatter);
        }
    }
    return valueWrapper.AsFrameList();
}
public TFrameList Deserialize(ref MessagePackReader reader, MessagePackSerializerOptions options)
{
    if (reader.TryReadNil())
    {
        return (TFrameList)(IList<T>)null;
    }
    Interlocked.Increment(ref ParallelGatekeeperSingleton.wrapperDepth);
    try
    {
        options.Security.DepthStep(ref reader);
        try
        {
            FrameFormatterSerializationOptions frameOptions = options.GetOptionParams();
            // Fall back to single-threaded deserialization when parallelism is disabled
            // or when this formatter is nested inside another parallel formatter.
            if (frameOptions.MthWorkerConfig.MaxConcurrentTasks == 1 || ParallelGatekeeperSingleton.wrapperDepth > 1)
            {
                return DeserializeSynchronous(ref reader, options);
            }
            var readerBackup = reader.CreatePeekReader();
            int count = reader.ReadArrayHeader();
            if (count == 0)
            {
                reader = readerBackup;
                return DeserializeSynchronous(ref reader, options);
            }
            // Probe the first element header without consuming it; without a frame length
            // prefix the stream cannot be split into batches for parallel processing.
            var peekReader = reader.CreatePeekReader();
            if (FrameItemFormatter<T>.ReadElementHeader(ref peekReader) == Frame<T>.unassigned)
            {
                if (frameOptions.ThrowOnUnnasignedFrameDeserialization)
                {
                    throw new StreamSerializationException($"Unassigned buffer length found during parallel deserialize for {nameof(TFrameList)}");
                }
                reader = readerBackup;
                return DeserializeSynchronous(ref reader, options);
            }
            IMessagePackFormatter<T> formatterT = options.Resolver.GetFormatterWithVerify<T>();
            ListFrameWrapper valueWrapper = GetTFrameListWrapper(count);
            Frame<T>[] resItems = valueWrapper.AsFrameArray();
            BatchSizeEstimator batchEstimator = new BatchSizeEstimator(frameOptions.BatchSizeEstimatorConfig);

            // Worker callback: deserialize one batch of concatenated frame bodies into its
            // slice of the result array, then return the pooled buffers.
            void ProcessBatch(BatchWithBufferWritersAndElementOffset batch, CancellationToken token)
            {
                try
                {
                    ReadOnlySpan<int> lengths = batch.buffers.lengths.WrittenSpan;
                    ReadOnlyMemory<byte> bodies = batch.buffers.concatenatedBodies.WrittenMemory;
                    int batchSize = batch.buffers.lengths.WrittenCount;
                    var destSpan = resItems.AsSpan(batch.offset, batchSize);
                    for (int ix = 0, bodyStartIx = 0; ix < batchSize; ix++)
                    {
                        int itemLen = lengths[ix];
                        ReadOnlyMemory<byte> body = bodies.Slice(bodyStartIx, itemLen);
                        MessagePackReader tmpReader = new MessagePackReader(body)
                        {
                            CancellationToken = token
                        };
                        destSpan[ix].BufferLength = body.Length;
                        destSpan[ix].Item = formatterT.Deserialize(ref tmpReader, options);
                        bodyStartIx += itemLen;
                    }
                }
                finally
                {
                    objPoolBufferWriterBodies.Return(batch.buffers.concatenatedBodies);
                    objPoolBufferWriterBodyLengths.Return(batch.buffers.lengths);
                }
            }

            using (var mtw = new MultiThreadedWorker<BatchWithBufferWritersAndElementOffset>(
                       frameOptions.MthWorkerConfig, ProcessBatch))
            {
                int i = 0;
                while (i < count)
                {
                    int batchSize = Math.Min(count - i, batchEstimator.RecomendedBatchSize);
                    var currentBatch = new BatchWithBufferWritersAndElementOffset()
                    {
                        offset = i,
                        buffers = new BatchWithBufferWriters()
                        {
                            concatenatedBodies = objPoolBufferWriterBodies.Get(),
                            lengths = objPoolBufferWriterBodyLengths.Get()
                        }
                    };
                    // Copy the raw bytes of each frame in the batch; workers deserialize them off-thread.
                    for (int seqIx = 0; seqIx < batchSize; seqIx++)
                    {
                        int itemLength = FrameItemFormatter<T>.ReadElementHeader(ref reader);
                        if (itemLength == Frame<T>.unassigned)
                        {
                            throw new StreamSerializationException($"Unassigned buffer length found during parallel deserialize for {nameof(TFrameList)}");
                        }
                        currentBatch.buffers.lengths.GetSpan(1)[0] = itemLength;
                        currentBatch.buffers.lengths.Advance(1);
                        ReadOnlySequence<byte> raw = reader.ReadRaw(itemLength);
                        raw.CopyTo(currentBatch.buffers.concatenatedBodies.GetSpan(itemLength));
                        currentBatch.buffers.concatenatedBodies.Advance(itemLength);
                        batchEstimator.UpdateEstimate(itemLength);
                    }
                    mtw.AddWorkItem(currentBatch, reader.CancellationToken);
                    i += batchSize;
                }
            }
            return valueWrapper.AsFrameList();
        }
        finally
        {
            reader.Depth--;
        }
    }
    finally
    {
        Interlocked.Decrement(ref ParallelGatekeeperSingleton.wrapperDepth);
    }
}
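The parallel path above relies on MessagePackReader being a copyable ref struct: CreatePeekReader returns a positional copy, so the formatter can probe the next element header and "rewind" by reassigning the original reader. A minimal, self-contained sketch of that peek/rewind pattern follows (assuming MessagePack-CSharp v2.x; the integer payload is purely illustrative):

// Peek/rewind sketch (assumes MessagePack-CSharp v2.x; payload is illustrative).
using System;
using MessagePack;

byte[] payload = MessagePackSerializer.Serialize(new[] { 1, 2, 3 });
var reader = new MessagePackReader(payload);

// CreatePeekReader returns a copy positioned at the same byte;
// consuming from the copy does not advance the original reader.
var peek = reader.CreatePeekReader();
Console.WriteLine($"peeked array length: {peek.ReadArrayHeader()}");

// Keeping an earlier copy allows a wholesale rewind by reassignment,
// which is what the readerBackup variable above is used for.
var backup = reader.CreatePeekReader();
reader.ReadArrayHeader();   // consume the header...
reader = backup;            // ...then rewind to before it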
public void Serialize(ref MessagePackWriter writer, TFrameList value, MessagePackSerializerOptions options)
{
    if (value == null)
    {
        writer.WriteNil();
        return;
    }
    Interlocked.Increment(ref ParallelGatekeeperSingleton.wrapperDepth);
    try
    {
        FrameFormatterSerializationOptions frameOptions = options.GetOptionParams();
        // Fall back to single-threaded serialization when parallelism is disabled
        // or when this formatter is nested inside another parallel formatter.
        if (frameOptions.FIFOWorkerConfig.MaxConcurrentTasks < 1 || ParallelGatekeeperSingleton.wrapperDepth != 1)
        {
            SerializeSynchronous(ref writer, value, options);
            return;
        }
        int count = value.Count;
        writer.WriteArrayHeader(count);
        BatchSizeEstimator batchEstimator = new BatchSizeEstimator(frameOptions.BatchSizeEstimatorConfig);
        IMessagePackFormatter<T> formatterT = options.Resolver.GetFormatterWithVerify<T>();
        bool isOldSpec = writer.OldSpec;

        // Worker callback: serialize one batch of frames into pooled buffers, recording each
        // item's serialized length so the writer thread can emit the frame headers later.
        BatchWithBufferWriters ProcessItems(ArraySegment<Frame<T>> batch, CancellationToken token)
        {
            BatchWithBufferWriters batchOut = new BatchWithBufferWriters();
            batchOut.concatenatedBodies = objPoolBufferWriterBodies.Get();
            batchOut.lengths = objPoolBufferWriterBodyLengths.Get();
            MessagePackWriter writerBody = new MessagePackWriter(batchOut.concatenatedBodies)
            {
                OldSpec = isOldSpec,
                CancellationToken = token
            };
            var spanIn = batch.AsSpan();
            int prevWrittenBytesCount = 0;
            int sumLen = 0;
            for (int ix = 0; ix < spanIn.Length; ix++)
            {
                formatterT.Serialize(ref writerBody, spanIn[ix], options);
                writerBody.Flush();
                int currWrittenBytesCount = batchOut.concatenatedBodies.WrittenCount;
                int objLen = currWrittenBytesCount - prevWrittenBytesCount;
                prevWrittenBytesCount = currWrittenBytesCount;
                batchOut.lengths.GetSpan(1)[0] = objLen;
                batchOut.lengths.Advance(1);
                sumLen += objLen;
            }
            if (spanIn.Length > 0)
            {
                // Update with the batch average instead of once per item. It's not exact, but it's faster.
                batchEstimator.UpdateEstimate((float)sumLen / (float)spanIn.Length);
            }
            return batchOut;
        }

        ListFrameWrapper valueWrapper = GetTFrameListWrapper(value);
        Frame<T>[] valueArray = valueWrapper.AsFrameArray();
        using (var fifow = new FIFOWorker<ArraySegment<Frame<T>>, BatchWithBufferWriters>(frameOptions.FIFOWorkerConfig, ProcessItems))
        {
            int i = 0;
            while (i < count)
            {
                int batchSize = Math.Min(count - i, batchEstimator.RecomendedBatchSize);
                if (batchSize <= 0)
                {
                    throw new StreamSerializationException($"Invalid batch sequence length: {batchSize}");
                }
                ArraySegment<Frame<T>> sourceSegment = new ArraySegment<Frame<T>>(valueArray, i, batchSize);
                // Completed batches come back in FIFO order, so the output ordering is preserved.
                foreach (BatchWithBufferWriters batchOutput in fifow.AddWorkItem(sourceSegment, writer.CancellationToken))
                {
                    BatchToStream(ref writer, batchOutput);
                }
                i += batchSize;
            }
            foreach (BatchWithBufferWriters batchOutput in fifow.Flush(writer.CancellationToken))
            {
                BatchToStream(ref writer, batchOutput);
            }
        }
    }
    finally
    {
        Interlocked.Decrement(ref ParallelGatekeeperSingleton.wrapperDepth);
    }
}
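For context, a hedged sketch of how a formatter like this is typically wired into MessagePack-CSharp v2.x options via CompositeResolver; the names customFormatters, myFrameList, and MyFrameList below are hypothetical placeholders for whatever concrete types this project exposes:

// Wiring sketch (assumes MessagePack-CSharp v2.x; placeholder names noted below).
using MessagePack;
using MessagePack.Formatters;
using MessagePack.Resolvers;

// Hypothetical: an instance of this project's frame-list formatter would go here.
IMessagePackFormatter[] customFormatters = { };

var resolver = CompositeResolver.Create(customFormatters, new IFormatterResolver[] { StandardResolver.Instance });
var options = MessagePackSerializerOptions.Standard.WithResolver(resolver);

// Any call that passes these options routes frame lists through the parallel formatter above, e.g.:
// byte[] bytes = MessagePackSerializer.Serialize(myFrameList, options);              // myFrameList: hypothetical
// var roundTripped = MessagePackSerializer.Deserialize<MyFrameList>(bytes, options); // MyFrameList: hypothetical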