// Example 1
        /// <summary>
        /// End-to-end happy path: a background producer pushes 500m sequential longs
        /// through the pipe while this thread consumes them, asserting strict FIFO order.
        /// </summary>
        public void HappyFlow()
        {
            // 500m on i9-9900 takes about 5-8secs.. bottle-necked by consumer thread. (Release build)
            long expectPayloadCount = 500000000;

            TestContext.WriteLine($"Pipe allocation {_stopwatch.Elapsed}");
            IPipe <long> sut = new BatchingPipe <long>(100000000);

            TestContext.WriteLine($"Pipe allocation end {_stopwatch.Elapsed}");

            // Limited buffer prevents this test eating more than ~1GB
            Task producingTask = RunProducingTask(sut, expectPayloadCount, 100000000);

            // Fixed typo in the log message ("stating" -> "starting").
            TestContext.WriteLine($"Consumer starting {_stopwatch.Elapsed}");
            long expectedNextValue = 0;

            foreach (long actualValue in sut.ToEnumerable())
            {
                // Tried using NUnit assert first - Assert.AreEqual(expectedNextValue++, actualValue)
                // But it introduced 99%+ of cpu overhead making this test run for minutes instead of seconds.
                if (expectedNextValue++ != actualValue)
                {
                    Assert.Fail($"Order was not preserved e:{expectedNextValue-1} a:{actualValue}");
                }
            }

            // Producer must have completed cleanly and emitted exactly the expected count.
            Assert.IsTrue(producingTask.IsCompletedSuccessfully);
            Assert.AreEqual(expectPayloadCount, expectedNextValue);

            TestContext.WriteLine($"Consumer completed {_stopwatch.Elapsed}");
        }
// Example 2
        /// <summary>Constructs a new pipe and notifies any subscribers of its creation.</summary>
        /// <returns>The freshly created pipe.</returns>
        public IPipe <T> Create()
        {
            BatchingPipe <T> createdPipe = new BatchingPipe <T>();

            // Raise the creation event (if anyone is listening) before handing the pipe out.
            PipeCreatedEvent?.Invoke(this, createdPipe);

            return createdPipe;
        }
// Example 3
        /// <summary>
        /// Verifies that packets produced into a pipe can be read back intact through
        /// its read-only stream adapter.
        /// </summary>
        public void HappyFlow()
        {
            BatchingPipe <Packet <byte> > packetsPipe = new BatchingPipe <Packet <byte> >();

            // Fire-and-forget producer; the assertion below drains the read side.
            _ = packetsPipe.ProduceCompleteAsync(FixedTestData.CreatePackets());

            Stream readSide = packetsPipe.ToReadOnlyStream();

            FixedTestData.AssertStream(readSide);
        }
// Example 4
        /// <summary>
        /// Heavy-load happy path: produces 5B sequential longs through a bounded pipe,
        /// consumes them as pooled packet batches in parallel, then validates per-batch
        /// integrity and global ordering across batch boundaries.
        /// </summary>
        public async Task HappyFlowHeavyPooledBatches()
        {
            // 5B (5000m) on i9-9900 takes about 25secs.. bottle-necked by producer thread. (Release build)
            long expectPayloadCount = 5000000000;

            TestContext.WriteLine($"Pipe allocation {_stopwatch.Elapsed}");
            IPipe <long> sut = new BatchingPipe <long>(100000000);

            TestContext.WriteLine($"Pipe allocation complete {_stopwatch.Elapsed}");

            // with unlimited buffer, test run was sitting between 3-16GB of ram usage.
            Task producingTask = RunProducingTask(sut, expectPayloadCount, Int32.MaxValue);



            TestContext.WriteLine($"Consumer stating {_stopwatch.Elapsed}");
            // Summaries arrive out of order from parallel consumers; the bag is thread-safe.
            ConcurrentBag <BatchSummary> results = new ConcurrentBag <BatchSummary>();
            await sut
            .ToPacketsAsyncEnumerable()
            .ParallelForEachAsync((packet, index) =>
            {
                try
                {
                    results.Add(BatchSummary.Validate(packet, index));
                    return(Task.CompletedTask);
                }
                finally
                {
                    // Dispose in finally so the pooled packet is returned even if validation throws.
                    packet.Dispose();
                }
            }
                                  );

            TestContext.WriteLine($"Consumer completed {_stopwatch.Elapsed}");

            Assert.IsTrue(producingTask.IsCompletedSuccessfully);

            // Order summaries by batch index so consecutive batches can be compared pairwise.
            BatchSummary[] resultsArray = results
                                          .Where(p => !p.Empty)
                                          .OrderBy(r => r.Index)
                                          .ToArray();

            Assert.Positive(resultsArray.Length);
            Assert.IsTrue(resultsArray.All(r => r.Valid));

            // Each batch must begin exactly where the previous one ended (no gaps or overlaps).
            for (int i = 1; i < resultsArray.Length; i++)
            {
                if (resultsArray[i].First - 1 != resultsArray[i - 1].Last)
                {
                    Assert.Fail("Bad ordering detected");
                }
            }

            // Final observed value must be the last payload (count - 1, zero-based sequence).
            Assert.AreEqual(expectPayloadCount - 1, resultsArray[^ 1].Last);
            // NOTE(review): the method's closing brace is missing from this excerpt — truncated by the source scrape.
// Example 5
        /// <summary>
        /// Signals that producing is complete, waits for the writer task to drain,
        /// then asserts its outcome. Subsequent calls are no-ops.
        /// </summary>
        public void End()
        {
            if (_queue == null)
            {
                return; // Already ended (or never begun) — nothing to finish.
            }

            _queue.ProducingCompleted();
            _queue = null;

            // Block until the writer has consumed everything, then validate its result.
            _writerTask.Wait();
            AssertTask();
        }
        /// <summary>
        /// Reads the duplicating stream and its duplicate concurrently, asserting that
        /// both yield the fixed test data.
        /// </summary>
        public async Task HappyFlow()
        {
            Stream inputStream = FixedTestData.CreateStream();
            IPipe <Packet <byte> > duplicateBuffer = new BatchingPipe <Packet <byte> >();

            DuplicateStream sut = new DuplicateStream(inputStream, duplicateBuffer.ToWriteOnlyStream());

            // Reading sut for assertion drives writes into the duplication destination,
            // so both streams can be asserted at the same time on separate tasks.
            Task assertSutStream        = Task.Run(() => FixedTestData.AssertStream(sut));
            Task assertDuplicatedStream = Task.Run(() => FixedTestData.AssertStream(duplicateBuffer.ToReadOnlyStream()));

            await assertSutStream;
            await assertDuplicatedStream;
        }
// Example 7
        /// <summary>
        /// Copies fixed test data into the pipe via its write-only stream adapter on a
        /// background task, then asserts the read side reproduces the data.
        /// </summary>
        public void HappyFlow()
        {
            Stream testData = new CombinedStream(FixedTestData.CreateStreams());

            BatchingPipe <Packet <byte> > packetsPipe = new BatchingPipe <Packet <byte> >();
            Stream sut = packetsPipe.ToWriteOnlyStream();

            // Keep the producer task so its failures are observed; the original discarded
            // it, which could hang the consumer or let the test pass despite a producer error.
            Task producingTask = Task.Run(async() =>
            {
                await testData.CopyToAsync(sut);
                await sut.DisposeAsync(); // Closing/Disposing adapter triggers completion of producer by default.
            });

            FixedTestData.AssertStream(packetsPipe.ToReadOnlyStream());
            //FixedTestData.DebugStream(packetsPipe.ToReadOnlyStream());

            // Surface any producer-side exception (unwrapped, unlike Task.Wait).
            producingTask.GetAwaiter().GetResult();
        }
// Example 8
        /// <summary>
        /// Splits <paramref name="source"/> into consecutive batches of at most
        /// <paramref name="batchSize"/> items. Each yielded batch is filled by a
        /// background task while the caller consumes it; the next batch starts only
        /// after the previous producer task finishes.
        /// </summary>
        /// <typeparam name="T">Element type.</typeparam>
        /// <param name="source">Sequence to partition; enumerated exactly once.</param>
        /// <param name="batchSize">Maximum number of items per batch.</param>
        /// <returns>A lazy sequence of batches.</returns>
        /// <exception cref="Exception">Wraps any failure raised while reading <paramref name="source"/>.</exception>
        public static IEnumerable <IEnumerable <T> > ToBatchesAsync <T>(this IEnumerable <T> source, int batchSize)
        {
            using IEnumerator <T> enumerator = source.GetEnumerator();

            while (enumerator.MoveNext())
            {
                BatchingPipe <T> batch = new BatchingPipe <T>();
                Task             task  = Task.Run(() => ProduceBatch(enumerator, batch, batchSize));

                yield return(batch.ToEnumerable());

                // Task.WaitAll/Wait throw AggregateException when the task faults, which
                // made the original post-wait IsCompletedSuccessfully check and custom
                // wrapper exception unreachable; catch and rethrow the intended wrapper.
                try
                {
                    task.Wait();
                }
                catch (AggregateException ex)
                {
                    throw new Exception("Failure while reading source enumerable", ex);
                }
            }
        }
// Example 9
 /// <summary>
 /// (Re)initializes the appender: fresh cancellation source, fresh event pipe,
 /// and a running consumer bound to the new token.
 /// </summary>
 public void ActivateOptions()
 {
     _cancellationTokenSource = new CancellationTokenSource();
     _pipe         = new BatchingPipe <LoggingEvent>();
     _consumerTask = StartConsumer(_cancellationTokenSource.Token);
 }
// Example 10
        /// <summary>Allocates the result queue and starts the writer task that processes it.</summary>
        public void Begin()
        {
            BatchingPipe <IResult> resultQueue = new BatchingPipe <IResult>();

            _queue      = resultQueue;
            _writerTask = resultQueue.ToEnumerableAsync(Process);
        }
// Example 11
 /// <summary>Routes every newly created pipe into the muxer for combined consumption.</summary>
 private void OnPipeCreatedEvent(IPipeFactory <IResult> sender, BatchingPipe <IResult> pipe) => _muxer.Add(pipe);