Example #1
        private BatchWithFramesArray HandleWorkerOutput(BatchWithBufferWriters batchIn, CancellationToken token)
        {
            try
            {
                ReadOnlySpan<int>    lengths   = batchIn.lengths.WrittenSpan;
                ReadOnlyMemory<byte> bodies    = batchIn.concatenatedBodies.WrittenMemory;
                int                  batchSize = batchIn.lengths.WrittenCount;
                Frame<T>[]           resFrames = arrPoolOutputBatch.Rent(batchSize);
                // Slice each item's body out of the concatenated buffer and deserialize it.
                for (int ix = 0, bodyStartIx = 0; ix < batchSize; ix++)
                {
                    int itemLen = lengths[ix];
                    ReadOnlyMemory<byte> body = bodies.Slice(bodyStartIx, itemLen);
                    resFrames[ix] = DeserializeBody(body, token);
                    bodyStartIx  += itemLen;
                }
                return new BatchWithFramesArray()
                {
                    batchSize = batchSize,
                    batch = resFrames
                };
            }
            finally
            {
                // Return the pooled input buffers whether or not deserialization succeeded.
                objPoolBufferWriterBodies.Return(batchIn.concatenatedBodies);
                objPoolBufferWriterBodyLengths.Return(batchIn.lengths);
            }
        }
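The batch container types used above are not part of the example. A minimal sketch of their likely shape, inferred only from the fields used in these snippets (assuming CommunityToolkit.HighPerformance's ArrayPoolBufferWriter<T> and that both structs are nested in the generic serializer class; the library's real definitions may differ):

        // Sketch only: inferred from usage, not the library's actual definitions.
        internal struct BatchWithBufferWriters
        {
            public ArrayPoolBufferWriter<byte> concatenatedBodies; // all serialized bodies of the batch, written back to back
            public ArrayPoolBufferWriter<int>  lengths;            // one entry per item: its byte length inside concatenatedBodies
        }

        internal struct BatchWithFramesArray
        {
            public int        batchSize; // number of valid entries in 'batch' (the array is pool-rented and may be larger)
            public Frame<T>[] batch;     // deserialized frames, rented from an ArrayPool
        }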
        private void BatchToStream(ref MessagePackWriter writer, BatchWithBufferWriters batch)
        {
            try
            {
                int GetItemLength(int lengthAtIx)
                {
                    ReadOnlySpan<int> lengths = batch.lengths.WrittenSpan;

                    return lengths[lengthAtIx];
                }
                int batchSize = batch.lengths.WrittenCount;
                ReadOnlySpan<byte> bodySpan = batch.concatenatedBodies.WrittenMemory.Span;
                for (int ix = 0, bodyStartIx = 0; ix < batchSize; ix++)
                {
                    int itemLen = GetItemLength(ix);

                    // Per-frame layout: array header, body length, then the raw pre-serialized body bytes.
                    writer.WriteArrayHeader(Frame<T>.parallelItemTotElems);
                    writer.Write((uint)itemLen);
                    writer.WriteRaw(bodySpan.Slice(bodyStartIx, itemLen));
                    bodyStartIx += itemLen;
                }
            }
            finally
            {
                objPoolBufferWriterBodies.Return(batch.concatenatedBodies);
                objPoolBufferWriterBodyLengths.Return(batch.lengths);
            }
        }
        private async Task BatchToStreamAsync(BatchWithBufferWriters batch, CancellationToken token)
        {
            try
            {
                // Local function: the span must not be captured across an await below.
                int GetItemLength(int lengthAtIx)
                {
                    ReadOnlySpan<int> lengths = batch.lengths.WrittenSpan;

                    return lengths[lengthAtIx];
                }
                int batchSize = batch.lengths.WrittenCount;
                for (int ix = 0, bodyStartIx = 0; ix < batchSize; ix++)
                {
                    int itemLen = GetItemLength(ix);

                    // Length-prefix the item, then copy its raw body from the pooled buffer to the stream.
                    WriteHeader(stream, (uint)itemLen);
                    await stream.WriteAsync(batch.concatenatedBodies.WrittenMemory.Slice(bodyStartIx, itemLen), token).ConfigureAwait(false);

                    bodyStartIx += itemLen;
                }
            }
            finally
            {
                objPoolBufferWriterBodies.Return(batch.concatenatedBodies);
                objPoolBufferWriterBodyLengths.Return(batch.lengths);
            }
        }
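BatchToStreamAsync above and DeserializeAsync below depend on WriteHeader/TryReadHeader helpers that are not shown. A minimal sketch of such a pair, assuming a plain 4-byte little-endian length prefix (the library may encode the header differently, for instance as MessagePack like BatchToStream does):

        // Sketch only; requires System.Buffers.Binary.BinaryPrimitives.
        private static void WriteHeader(Stream stream, uint itemLength)
        {
            Span<byte> header = stackalloc byte[sizeof(uint)];
            BinaryPrimitives.WriteUInt32LittleEndian(header, itemLength);
            stream.Write(header);
        }

        private static bool TryReadHeader(Stream stream, out int itemLength)
        {
            Span<byte> header  = stackalloc byte[sizeof(uint)];
            int        totRead = 0;
            while (totRead < header.Length)
            {
                int read = stream.Read(header.Slice(totRead));
                if (read == 0)
                {
                    if (totRead == 0) { itemLength = 0; return false; } // clean end of stream
                    throw new EndOfStreamException("Truncated length header");
                }
                totRead += read;
            }
            itemLength = (int)BinaryPrimitives.ReadUInt32LittleEndian(header);
            return true;
        }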
Example #4
        public async IAsyncEnumerable<Frame<T>> DeserializeAsync(Stream stream, [EnumeratorCancellation] CancellationToken token = default)
        {
            Interlocked.Increment(ref ParallelGatekeeperSingleton.wrapperDepth);
            try
            {
                BatchWithBufferWriters currentBatch = new BatchWithBufferWriters();
                int currentBatchTotalSize           = 0;
                while (TryReadHeader(stream, out int itemLength))
                {
                    if (currentBatchTotalSize + itemLength > desiredBatchSize_bytes && currentBatchTotalSize > 0)
                    {
                        // this item would push the batch past the target size: send the previous batch first
                        foreach (Frame<T> t in IterateOutputBatch(fifow.AddWorkItem(currentBatch, token)))
                        {
                            yield return t;
                        }
                        currentBatchTotalSize = 0;
                    }
                    if (currentBatchTotalSize == 0)
                    {
                        currentBatch.concatenatedBodies = objPoolBufferWriterBodies.Get();
                        currentBatch.lengths            = objPoolBufferWriterBodyLengths.Get();
                    }
                    // read element from stream and add to batch
                    currentBatch.lengths.GetSpan(1)[0] = itemLength;
                    currentBatch.lengths.Advance(1);
                    // Stream.ReadAsync may return fewer bytes than requested, so loop until the full body is read.
                    Memory<byte> bodyDest = currentBatch.concatenatedBodies.GetMemory(itemLength).Slice(0, itemLength);
                    int totRead = 0;
                    while (totRead < itemLength)
                    {
                        int read = await stream.ReadAsync(bodyDest.Slice(totRead), token).ConfigureAwait(false);
                        if (read == 0)
                        {
                            break; // premature end of stream; reported by the check below
                        }
                        totRead += read;
                    }

                    if (totRead != itemLength)
                    {
                        throw new StreamSerializationException($"Unexpected number of bytes read from stream ({totRead}). Expected {itemLength}");
                    }
                    currentBatch.concatenatedBodies.Advance(itemLength);
                    currentBatchTotalSize += itemLength;
                }
                if (currentBatchTotalSize > 0) // send unfinished batch
                {
                    foreach (Frame<T> t in IterateOutputBatch(fifow.AddWorkItem(currentBatch, token)))
                    {
                        yield return t;
                    }
                }
                foreach (Frame<T> t in IterateOutputBatch(fifow.Flush(token)))
                {
                    yield return t;
                }
            }
            finally
            {
                Interlocked.Decrement(ref ParallelGatekeeperSingleton.wrapperDepth);
            }
        }
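For reference, a hypothetical consumer of DeserializeAsync; the serializer instance, MyPayload type and Process method below are illustrative names, not part of the example:

        // Frames come back in stream order even though batches are deserialized
        // concurrently by the FIFO worker.
        await foreach (Frame<MyPayload> frame in serializer.DeserializeAsync(inputStream, cancellationToken))
        {
            Process(frame);
        }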
        private BatchWithBufferWriters HandleWorkerOutput(ArrayPoolBufferWriter<T> batch, CancellationToken token)
        {
            try
            {
                BatchWithBufferWriters batchOut = new BatchWithBufferWriters();
                batchOut.concatenatedBodies = objPoolBufferWriterBodies.Get();
                batchOut.lengths            = objPoolBufferWriterBodyLengths.Get();
                MessagePackWriter writerBody = new MessagePackWriter(batchOut.concatenatedBodies)
                {
                    OldSpec           = w_opts.OldSpec ?? false,
                    CancellationToken = token
                };
                var spanIn = batch.WrittenSpan;
                int prevWrittenBytesCount = 0;
                int sumLen = 0;
                for (int ix = 0; ix < spanIn.Length; ix++)
                {
                    // Serialize each item into the shared body buffer and record how many bytes it took.
                    formatterT.Serialize(ref writerBody, spanIn[ix], w_opts);
                    writerBody.Flush();
                    int objLen = batchOut.concatenatedBodies.WrittenCount - prevWrittenBytesCount;
                    prevWrittenBytesCount          = batchOut.concatenatedBodies.WrittenCount;
                    batchOut.lengths.GetSpan(1)[0] = objLen;
                    batchOut.lengths.Advance(1);
                    sumLen += objLen;
                }
                if (spanIn.Length > 0)
                {
                    batchEstimator.UpdateEstimate((float)sumLen / (float)spanIn.Length); // update with the batch average instead of per item; not exact, but faster
                }
                return batchOut;
            }
            finally
            {
                objPoolOutputBatch.Return(batch);
            }
        }
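The BatchSizeEstimator used here and in Serialize below is not included in the examples. A minimal sketch of the contract implied by UpdateEstimate and RecomendedBatchSize, assuming it divides a target byte budget per batch by a smoothed average item size (the real constructor takes a BatchSizeEstimatorConfig and is simplified here):

        // Sketch only: not the library's implementation.
        internal sealed class BatchSizeEstimator
        {
            private readonly int desiredBatchSizeBytes; // assumed target byte budget per batch
            private float avgItemBytes;                 // smoothed average serialized item size

            public BatchSizeEstimator(int desiredBatchSizeBytes) => this.desiredBatchSizeBytes = desiredBatchSizeBytes;

            // How many items are expected to fit in the byte budget (always at least one).
            public int RecomendedBatchSize => avgItemBytes <= 0f
                ? 1
                : Math.Max(1, (int)(desiredBatchSizeBytes / avgItemBytes));

            // Called once per processed batch with that batch's average item size.
            public void UpdateEstimate(float observedAvgItemBytes) => avgItemBytes = avgItemBytes <= 0f
                ? observedAvgItemBytes
                : 0.5f * (avgItemBytes + observedAvgItemBytes); // simple smoothing
        }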
        public void Serialize(ref MessagePackWriter writer, TFrameList value, MessagePackSerializerOptions options)
        {
            if (value == null)
            {
                writer.WriteNil();
                return;
            }
            Interlocked.Increment(ref ParallelGatekeeperSingleton.wrapperDepth);
            try
            {
                FrameFormatterSerializationOptions frameOptions = options.GetOptionParams();

                // Fall back to plain synchronous serialization when parallelism is disabled
                // or when this call is nested inside another parallel (de)serialization.
                if (frameOptions.FIFOWorkerConfig.MaxConcurrentTasks < 1 || ParallelGatekeeperSingleton.wrapperDepth != 1)
                {
                    SerializeSynchronous(ref writer, value, options);
                    return;
                }
                int count = value.Count;
                writer.WriteArrayHeader(count);
                BatchSizeEstimator        batchEstimator = new BatchSizeEstimator(frameOptions.BatchSizeEstimatorConfig);
                IMessagePackFormatter<T>  formatterT     = options.Resolver.GetFormatterWithVerify<T>();
                bool isOldSpec = writer.OldSpec;

                BatchWithBufferWriters ProcessItems(ArraySegment<Frame<T>> batch, CancellationToken token)
                {
                    BatchWithBufferWriters batchOut = new BatchWithBufferWriters();

                    batchOut.concatenatedBodies = objPoolBufferWriterBodies.Get();
                    batchOut.lengths            = objPoolBufferWriterBodyLengths.Get();
                    MessagePackWriter writerBody = new MessagePackWriter(batchOut.concatenatedBodies)
                    {
                        OldSpec = isOldSpec, CancellationToken = token
                    };
                    var spanIn = batch.AsSpan();
                    int prevWrittenBytesCount = 0;
                    int sumLen = 0;

                    for (int ix = 0; ix < spanIn.Length; ix++)
                    {
                        formatterT.Serialize(ref writerBody, spanIn[ix], options);
                        writerBody.Flush();
                        int currWrittenBytesCount = batchOut.concatenatedBodies.WrittenCount;
                        int objLen = currWrittenBytesCount - prevWrittenBytesCount;
                        prevWrittenBytesCount          = currWrittenBytesCount;
                        batchOut.lengths.GetSpan(1)[0] = objLen;
                        batchOut.lengths.Advance(1);
                        sumLen += objLen;
                    }
                    if (spanIn.Length > 0)
                    {
                        batchEstimator.UpdateEstimate((float)sumLen / (float)spanIn.Length); // update with the batch average instead of per item; not exact, but faster
                    }
                    return batchOut;
                }

                ListFrameWrapper valueWrapper = GetTFrameListWrapper(value);

                Frame<T>[] valueArray = valueWrapper.AsFrameArray();
                using (var fifow = new FIFOWorker<ArraySegment<Frame<T>>, BatchWithBufferWriters>(frameOptions.FIFOWorkerConfig, ProcessItems))
                {
                    int i = 0;
                    while (i < count)
                    {
                        int batchSize = Math.Min(count - i, batchEstimator.RecomendedBatchSize);
                        if (batchSize <= 0)
                        {
                            throw new StreamSerializationException($"Invalid batch sequence length: {batchSize}");
                        }
                        ArraySegment<Frame<T>> sourceSegment = new ArraySegment<Frame<T>>(valueArray, i, batchSize);
                        foreach (BatchWithBufferWriters batchOutput in fifow.AddWorkItem(sourceSegment, writer.CancellationToken))
                        {
                            BatchToStream(ref writer, batchOutput);
                        }

                        i += batchSize;
                    }
                    foreach (BatchWithBufferWriters batchOutput in fifow.Flush(writer.CancellationToken))
                    {
                        BatchToStream(ref writer, batchOutput);
                    }
                }
            }
            finally
            {
                Interlocked.Decrement(ref ParallelGatekeeperSingleton.wrapperDepth);
            }
        }
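Both DeserializeAsync and Serialize bracket their work with ParallelGatekeeperSingleton.wrapperDepth. A minimal sketch of what that gatekeeper amounts to, inferred from how it is used in these examples (the real type may carry more state):

        // Sketch only: a process-wide depth counter for detecting nested parallel (de)serialization.
        internal static class ParallelGatekeeperSingleton
        {
            // Incremented on entry to a parallel-capable wrapper and decremented on exit.
            // A depth other than 1 means this call is nested inside another wrapper, so the
            // formatter falls back to its synchronous path instead of spawning more workers.
            public static int wrapperDepth;
        }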