Example #1
        public async IAsyncEnumerable<Frame<T>> DeserializeAsync(Stream stream, [EnumeratorCancellation] CancellationToken token = default)
        {
            Interlocked.Increment(ref ParallelGatekeeperSingleton.wrapperDepth);
            try
            {
                BatchWithBufferWriters currentBatch = new BatchWithBufferWriters();
                int currentBatchTotalSize           = 0;
                while (TryReadHeader(stream, out int itemLength))
                {
                    if (currentBatchTotalSize + itemLength > desiredBatchSize_bytes && currentBatchTotalSize > 0)
                    {
                        // send prev batch
                        foreach (Frame<T> t in IterateOutputBatch(fifow.AddWorkItem(currentBatch, token)))
                        {
                            yield return t;
                        }
                        currentBatchTotalSize = 0;
                    }
                    if (currentBatchTotalSize == 0)
                    {
                        currentBatch.concatenatedBodies = objPoolBufferWriterBodies.Get();
                        currentBatch.lengths            = objPoolBufferWriterBodyLengths.Get();
                    }
                    // read element from stream and add to batch
                    currentBatch.lengths.GetSpan(1)[0] = itemLength;
                    currentBatch.lengths.Advance(1);
                    int totRead = await stream.ReadAsync(currentBatch.concatenatedBodies.GetMemory(itemLength).Slice(0, itemLength), token).ConfigureAwait(false);

                    if (totRead != itemLength)
                    {
                        throw new StreamSerializationException($"Unexpected number of bytes read from stream ({totRead}). Expected {itemLength}");
                    }
                    currentBatch.concatenatedBodies.Advance(itemLength);
                    currentBatchTotalSize += itemLength;
                }
                if (currentBatchTotalSize > 0) // send unfinished batch
                {
                    foreach (Frame<T> t in IterateOutputBatch(fifow.AddWorkItem(currentBatch, token)))
                    {
                        yield return t;
                    }
                }
                foreach (Frame<T> t in IterateOutputBatch(fifow.Flush(token)))
                {
                    yield return t;
                }
            }
            finally
            {
                Interlocked.Decrement(ref ParallelGatekeeperSingleton.wrapperDepth);
            }
        }
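A hedged usage sketch for the streaming deserializer above: FrameParallelDeserializer<T> is a hypothetical wrapper type exposing DeserializeAsync; only Frame<T> and DeserializeAsync come from the code shown.

        // Sketch only: counts frames pulled from a stream. Assumes a hypothetical
        // FrameParallelDeserializer<T> exposing the DeserializeAsync method above.
        public static async Task<int> CountFramesSketchAsync(Stream source, CancellationToken token)
        {
            var deserializer = new FrameParallelDeserializer<int>(); // assumed constructor
            int total = 0;
            // Frames are yielded in their original stream order; batching and parallel
            // decoding happen internally via the FIFOWorker pipeline shown above.
            await foreach (Frame<int> frame in deserializer.DeserializeAsync(source, token))
            {
                total++;
            }
            return total;
        }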
        public void AddWorkItem_ItemsProcessed(int totItems, int totThreads, int maxQueuedItems)
        {
            MockWorker mw = new MockWorker();
            CancellationTokenSource ts       = new CancellationTokenSource();
            List<MockWorkOut>       doneWork = new List<MockWorkOut>();
            var cfg = GetConfig(totThreads, maxQueuedItems);

            using (FIFOWorker<MockWorkIn, MockWorkOut> fifo = new FIFOWorker<MockWorkIn, MockWorkOut>(cfg, mw.DoMockWork_Simple))
            {
                foreach (int inputIx in Enumerable.Range(1, totItems))
                {
                    foreach (var outItem in fifo.AddWorkItem(new MockWorkIn(inputIx), ts.Token))
                    {
                        doneWork.Add(outItem);
                    }
                }
                foreach (var outItem in fifo.Flush(ts.Token))
                {
                    doneWork.Add(outItem);
                }
            }

            Assert.AreEqual(Enumerable.Range(1, totItems), doneWork.Select(f => f.originalInputItem.ix));
            Assert.AreEqual(Enumerable.Range(1, totItems), mw.doneWork.Where(f => !f.Item1).OrderBy(f => f.Item2.ix).Select(f => f.Item2.ix));
        }
        public void AddWorkItem_OneItemProcessed()
        {
            MockWorker mw = new MockWorker();
            CancellationTokenSource ts       = new CancellationTokenSource();
            List<MockWorkOut>       doneWork = new List<MockWorkOut>();
            int inputIx        = 1;
            int totThreads     = 1;
            int maxQueuedItems = 1;
            var cfg            = GetConfig(totThreads, maxQueuedItems);

            using (FIFOWorker<MockWorkIn, MockWorkOut> fifo = new FIFOWorker<MockWorkIn, MockWorkOut>(cfg, mw.DoMockWork_Simple))
            {
                foreach (var outItem in fifo.AddWorkItem(new MockWorkIn(inputIx), ts.Token).Concat(fifo.Flush(ts.Token)))
                {
                    doneWork.Add(outItem);
                }
            }

            Assert.AreEqual(1, doneWork.Count);
            Assert.AreEqual(inputIx, doneWork.First().originalInputItem.ix);
            Assert.AreEqual(1, mw.doneWork.Count);
            Assert.AreEqual(false, mw.doneWork.First().Item1);
            Assert.AreEqual(inputIx, mw.doneWork.First().Item2.ix);
            Assert.AreEqual(inputIx, mw.doneWork.First().Item3.originalInputItem.ix);
        }
        public void AddWorkItem_Cancels(int totThreads)
        {
            MockWorker mw = new MockWorker();
            CancellationTokenSource ts       = new CancellationTokenSource();
            List<MockWorkOut>       doneWork = new List<MockWorkOut>();
            var cfg = new FIFOWorkerConfig(totThreads);
            FIFOWorker<MockWorkIn, MockWorkOut> fifo = new FIFOWorker<MockWorkIn, MockWorkOut>(cfg, mw.DoMockWorkBlocking);
            int countTask = fifo.AddWorkItem(new MockWorkIn(1), ts.Token).Count();

            mw.TriggerOnBlockedWork(() => ts.Cancel());

            Assert.Throws<TaskCanceledException>(() =>
            {
                try
                {
                    fifo.Dispose();
                }
                catch (AggregateException ag)
                {
                    throw ag.GetBaseException();
                }
            });
            Assert.AreEqual(1, mw.doneWork.Count(f => f.Item1));
            Assert.AreEqual(1, mw.doneWork.Count());
        }
        public void Dispose_ExceptionInWorkerPropagates(int totThreads)
        {
            TaskCompletionSource<object> taskBlocker1 = new TaskCompletionSource<object>();

            MockWorkOut DoMockWorkBlocking(MockWorkIn work, CancellationToken token)
            {
                taskBlocker1.Task.Wait();
                throw new MockException();
            }

            CancellationTokenSource ts       = new CancellationTokenSource();
            List<MockWorkOut>       doneWork = new List<MockWorkOut>();
            var cfg = new FIFOWorkerConfig(totThreads);
            FIFOWorker<MockWorkIn, MockWorkOut> fifo = new FIFOWorker<MockWorkIn, MockWorkOut>(cfg, DoMockWorkBlocking);
            int count = fifo.AddWorkItem(new MockWorkIn(1), ts.Token).Count();

            Assert.Throws<MockException>(() =>
            {
                try
                {
                    taskBlocker1.SetResult(null);
                    fifo.Dispose();
                }
                catch (AggregateException ag)
                {
                    throw ag.GetBaseException();
                }
            });
        }
        public void Dispose_CancelsPendingTasks(int totThreads)
        {
            TaskCompletionSource<object> taskBlocker1 = new TaskCompletionSource<object>();
            TaskCompletionSource<object> taskBlocker2 = new TaskCompletionSource<object>();
            bool completed = false;

            int totBlockCalled = 0;

            MockWorkOut DoMockWorkBlocking(MockWorkIn work, CancellationToken token)
            {
                Interlocked.Increment(ref totBlockCalled);
                taskBlocker2.SetResult(null);
                taskBlocker1.Task.Wait();
                Task.Delay(1, token).Wait();
                completed = true;
                return new MockWorkOut(work);
            }

            CancellationTokenSource ts       = new CancellationTokenSource();
            List<MockWorkOut>       doneWork = new List<MockWorkOut>();
            var cfg = new FIFOWorkerConfig(totThreads);
            FIFOWorker<MockWorkIn, MockWorkOut> fifo = new FIFOWorker<MockWorkIn, MockWorkOut>(cfg, DoMockWorkBlocking);
            int count = fifo.AddWorkItem(new MockWorkIn(1), ts.Token).Count();

            taskBlocker2.Task.Wait();
            taskBlocker1.SetResult(null);


            Assert.AreEqual(1, totBlockCalled);
            Assert.AreEqual(0, count);
            Assert.DoesNotThrow(fifo.Dispose);
            Assert.AreEqual(true, completed);
        }
Example #7
        public async IAsyncEnumerable<T> DeserializeAsync(Stream stream, [EnumeratorCancellation] CancellationToken token = default)
        {
            BatchIn currentBatch              = new BatchIn();
            int     currentBatchTotalSize     = 0;
            int     currentBatchTotalElements = 0;

            while (TryReadHeader(stream, out int itemLength))
            {
                if (currentBatchTotalSize + itemLength > desiredBatchSize_bytes && currentBatchTotalElements > 0)
                {
                    // send prev batch
                    foreach (T t in IterateOutputBatch(fifow.AddWorkItem(currentBatch, token)))
                    {
                        yield return t;
                    }
                    currentBatchTotalSize     = 0;
                    currentBatchTotalElements = 0;
                }
                if (currentBatchTotalElements == 0)
                {
                    currentBatch.concatenatedBodies = objPoolBufferWriterSerializedBatch.Get();
                    currentBatch.Lengths            = objPoolList.Get();
                }
                await BufferFromStreamAsync(stream, currentBatch.concatenatedBodies, itemLength, token).ConfigureAwait(false);

                currentBatchTotalSize += itemLength;
                currentBatchTotalElements++;
                currentBatch.Lengths.Add(itemLength);
            }
            if (currentBatchTotalElements > 0) // send unfinished batch
            {
                foreach (T t in IterateOutputBatch(fifow.AddWorkItem(currentBatch, token)))
                {
                    yield return t;
                }
            }
            foreach (T t in IterateOutputBatch(fifow.Flush(token)))
            {
                yield return t;
            }
        }
Example #8
 private async Task CompleteBatch(bool flushBatch, CancellationToken token)
 {
     if (flushBatch || currentBatch.Count >= desiredBatchSize)
     {
         foreach (var bw in fifow.AddWorkItem(currentBatch, token))
         {
             await BatchToStreamAsync(bw, token).ConfigureAwait(false);
         }
         currentBatch     = objPoolList.Get();
         desiredBatchSize = batchEstimator.RecomendedBatchSize;
     }
 }
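A rough sketch of the write-side loop that could drive CompleteBatch above; SerializeManyAsync is hypothetical, and currentBatch, fifow and BatchToStreamAsync are assumed to be the members the snippet already references.

 // Sketch only: accumulate items, let CompleteBatch emit full batches, then flush the tail.
 private async Task SerializeManyAsync(IEnumerable<T> items, CancellationToken token)
 {
     foreach (T item in items)
     {
         currentBatch.Add(item);                                  // accumulate until the estimated batch size
         await CompleteBatch(false, token).ConfigureAwait(false); // emits only when the batch is full
     }
     await CompleteBatch(true, token).ConfigureAwait(false);      // force out the partial tail batch
     foreach (var bw in fifow.Flush(token))                       // drain batches still queued in the FIFO worker
     {
         await BatchToStreamAsync(bw, token).ConfigureAwait(false);
     }
 }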
        public void Dispose_ThrowsOnCancelledTasks(int totThreads)
        {
            TaskCompletionSource<object> taskBlocker1 = new TaskCompletionSource<object>();
            TaskCompletionSource<object> taskBlocker2 = new TaskCompletionSource<object>();
            bool completed = false;

            int totBlockCalled = 0;

            MockWorkOut DoMockWorkBlocking(MockWorkIn work, CancellationToken token)
            {
                Interlocked.Increment(ref totBlockCalled);
                taskBlocker2.SetResult(null);
                taskBlocker1.Task.Wait();
                Task.Delay(1, token).Wait();
                completed = true;
                return new MockWorkOut(work);
            }

            CancellationTokenSource ts       = new CancellationTokenSource();
            List<MockWorkOut>       doneWork = new List<MockWorkOut>();
            var cfg = new FIFOWorkerConfig(totThreads);
            FIFOWorker<MockWorkIn, MockWorkOut> fifo = new FIFOWorker<MockWorkIn, MockWorkOut>(cfg, DoMockWorkBlocking);
            int count = fifo.AddWorkItem(new MockWorkIn(1), ts.Token).Count();

            taskBlocker2.Task.Wait();
            ts.Cancel();

            Assert.Throws<TaskCanceledException>(() =>
            {
                try
                {
                    taskBlocker1.SetResult(null);
                    fifo.Dispose();
                }
                catch (AggregateException ag)
                {
                    throw ag.GetBaseException();
                }
            });
            Assert.AreEqual(1, totBlockCalled);
            Assert.AreEqual(0, count);
            Assert.AreEqual(false, completed);
        }
        public void Serialize(ref MessagePackWriter writer, TFrameList value, MessagePackSerializerOptions options)
        {
            if (value == null)
            {
                writer.WriteNil();
                return;
            }
            Interlocked.Increment(ref ParallelGatekeeperSingleton.wrapperDepth);
            try
            {
                FrameFormatterSerializationOptions frameOptions = options.GetOptionParams();

                if (frameOptions.FIFOWorkerConfig.MaxConcurrentTasks < 1 || ParallelGatekeeperSingleton.wrapperDepth != 1)
                {
                    SerializeSynchronous(ref writer, value, options);
                    return;
                }
                int count = value.Count;
                writer.WriteArrayHeader(count);
                BatchSizeEstimator        batchEstimator = new BatchSizeEstimator(frameOptions.BatchSizeEstimatorConfig);
                IMessagePackFormatter<T>  formatterT     = options.Resolver.GetFormatterWithVerify<T>();
                bool isOldSpec = writer.OldSpec;

                BatchWithBufferWriters ProcessItems(ArraySegment<Frame<T>> batch, CancellationToken token)
                {
                    BatchWithBufferWriters batchOut = new BatchWithBufferWriters();

                    batchOut.concatenatedBodies = objPoolBufferWriterBodies.Get();
                    batchOut.lengths            = objPoolBufferWriterBodyLengths.Get();
                    MessagePackWriter writerBody = new MessagePackWriter(batchOut.concatenatedBodies)
                    {
                        OldSpec = isOldSpec, CancellationToken = token
                    };
                    var spanIn = batch.AsSpan();
                    int prevWrittenBytesCount = 0;
                    int sumLen = 0;

                    for (int ix = 0; ix < spanIn.Length; ix++)
                    {
                        formatterT.Serialize(ref writerBody, spanIn[ix], options);
                        writerBody.Flush();
                        int currWrittenBytesCount = batchOut.concatenatedBodies.WrittenCount;
                        int objLen = currWrittenBytesCount - prevWrittenBytesCount;
                        prevWrittenBytesCount          = currWrittenBytesCount;
                        batchOut.lengths.GetSpan(1)[0] = objLen;
                        batchOut.lengths.Advance(1);
                        sumLen += objLen;
                    }
                    if (spanIn.Length > 0)
                    {
                        batchEstimator.UpdateEstimate((float)sumLen / (float)spanIn.Length); // update with avg instead of updating for every loop item. It's not exact, but it's faster
                    }
                    return batchOut;
                }

                ListFrameWrapper valueWrapper = GetTFrameListWrapper(value);

                Frame<T>[] valueArray = valueWrapper.AsFrameArray();
                using (var fifow = new FIFOWorker<ArraySegment<Frame<T>>, BatchWithBufferWriters>(frameOptions.FIFOWorkerConfig, ProcessItems))
                {
                    int i = 0;
                    while (i < count)
                    {
                        int batchSize = Math.Min(count - i, batchEstimator.RecomendedBatchSize);
                        if (batchSize <= 0)
                        {
                            throw new StreamSerializationException($"Invalid batch sequence length: {batchSize}");
                        }
                        ArraySegment<Frame<T>> sourceSegment = new ArraySegment<Frame<T>>(valueArray, i, batchSize);
                        foreach (BatchWithBufferWriters batchOutput in fifow.AddWorkItem(sourceSegment, writer.CancellationToken))
                        {
                            BatchToStream(ref writer, batchOutput);
                        }

                        i += batchSize;
                    }
                    foreach (BatchWithBufferWriters batchOutput in fifow.Flush(writer.CancellationToken))
                    {
                        BatchToStream(ref writer, batchOutput);
                    }
                }
            }
            finally
            {
                Interlocked.Decrement(ref ParallelGatekeeperSingleton.wrapperDepth);
            }
        }
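For orientation, a hedged wiring sketch: FrameListFormatter<T> and BuildFrames() stand in for the formatter whose Serialize method is shown above and for payload construction; CompositeResolver, StandardResolver, MessagePackSerializerOptions and MessagePackSerializer are the standard MessagePack-CSharp APIs.

        // Sketch only: register the (assumed) formatter and serialize through MessagePack-CSharp.
        // Requires: using MessagePack; using MessagePack.Formatters; using MessagePack.Resolvers;
        public static byte[] SerializeFramesSketch(List<Frame<int>> frames)
        {
            var resolver = CompositeResolver.Create(
                new IMessagePackFormatter[] { new FrameListFormatter<int>() }, // assumed formatter type
                new IFormatterResolver[] { StandardResolver.Instance });

            var options = MessagePackSerializerOptions.Standard.WithResolver(resolver);

            // The registered formatter is assumed to handle List<Frame<int>>, which routes
            // serialization through the parallel batching Serialize path shown above.
            return MessagePackSerializer.Serialize(frames, options);
        }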