/// <summary>
/// Streams deserialized elements of type <typeparamref name="T"/> from
/// <paramref name="stream"/>. Length-prefixed items are accumulated into pooled
/// batches bounded by <c>desiredBatchSize_bytes</c>; each full batch is handed to
/// the FIFO worker (<c>fifow</c>) and its results are yielded in order.
/// </summary>
/// <param name="stream">Source stream of length-prefixed serialized items.</param>
/// <param name="token">Cancellation token flowed into buffering and worker dispatch.</param>
public async IAsyncEnumerable<T> DeserializeAsync(Stream stream, [EnumeratorCancellation] CancellationToken token = default)
{
    BatchIn pendingBatch = new BatchIn();
    int pendingBytes = 0;
    int pendingCount = 0;

    while (TryReadHeader(stream, out int itemLength))
    {
        // The next item would push the batch past its size budget:
        // dispatch what we have so far and start a new batch.
        if (pendingCount > 0 && pendingBytes + itemLength > desiredBatchSize_bytes)
        {
            foreach (T element in IterateOutputBatch(fifow.AddWorkItem(pendingBatch, token)))
            {
                yield return element;
            }
            pendingBytes = 0;
            pendingCount = 0;
        }

        // First item of a (new) batch: lease fresh pooled buffers. They are
        // returned to the pools by the worker-output path after deserialization.
        if (pendingCount == 0)
        {
            pendingBatch.concatenatedBodies = objPoolBufferWriterSerializedBatch.Get();
            pendingBatch.Lengths = objPoolList.Get();
        }

        await BufferFromStreamAsync(stream, pendingBatch.concatenatedBodies, itemLength, token).ConfigureAwait(false);
        pendingBytes += itemLength;
        pendingCount++;
        pendingBatch.Lengths.Add(itemLength);
    }

    // Dispatch the final, partially filled batch (if any).
    if (pendingCount > 0)
    {
        foreach (T element in IterateOutputBatch(fifow.AddWorkItem(pendingBatch, token)))
        {
            yield return element;
        }
    }

    // Drain whatever the worker pipeline still holds.
    foreach (T element in IterateOutputBatch(fifow.Flush(token)))
    {
        yield return element;
    }
}
/// <summary>
/// Worker-side handler: deserializes one batch's concatenated bodies via
/// <c>typeModel</c> and wraps the resulting array in a <c>BatchOut</c>.
/// The batch's pooled buffers are always returned to their pools in
/// <c>finally</c>, even when deserialization fails.
/// </summary>
/// <param name="batch">Input batch whose <c>concatenatedBodies</c> holds the serialized payload.</param>
/// <param name="token">Cancellation token (currently unused here; kept for signature symmetry with the worker pipeline).</param>
/// <returns>A <c>BatchOut</c> carrying the deserialized element array.</returns>
/// <exception cref="StreamSerializationException">
/// Thrown when the deserialized object is not a <c>ParallelServices_ArrayWrapper&lt;T&gt;</c>.
/// </exception>
private BatchOut HandleWorkerOutput(BatchIn batch, CancellationToken token)
{
    try
    {
        var obj = typeModel.Deserialize(t_ParallelServices_ArrayWrapper, batch.concatenatedBodies.WrittenSpan);
        if (obj is ParallelServices_ArrayWrapper<T> arrWT)
        {
            // FIX: original was `return new BatchOut() { ... } } ;` — the missing
            // semicolon after the initializer plus a stray empty statement before
            // `else` made this method a compile error.
            return new BatchOut() { elements = arrWT.Array };
        }
        else
        {
            throw new StreamSerializationException($"Invalid deserialized element type. Expected {t_ParallelServices_ArrayWrapper}, got [{(obj is null ? "null" : obj.GetType().ToString())}]");
        }
    }
    finally
    {
        // Return pooled resources acquired by the reader side regardless of outcome.
        objPoolBufferWriterSerializedBatch.Return(batch.concatenatedBodies);
        objPoolList.Return(batch.Lengths);
    }
}