public async Task MultithreadingInsertsDontCrash()
        {
            int insertThreads  = 4;
            int itemsPerThread = 100;

            _queue = new AsyncBatchQueue <int>(11);

            List <Task> insertTasks = Enumerable.Range(1, insertThreads)
                .Select(_ => Task.Run(() =>
                {
                    for (int i = 0; i < itemsPerThread; i++)
                    {
                        _queue.Add(42);
                    }
                }))
                .ToList();

            await Task.WhenAll(insertTasks).ConfigureAwait(true);

            _queue.Flush();

            int itemsTaken = 0;

            while (_queue.Count > 0)
            {
                itemsTaken += (await _queue.TakeAsync().ConfigureAwait(true)).Count;
            }

            itemsTaken.Should().Be(insertThreads * itemsPerThread);
        }
		public async Task MultithreadingInsertsDontCrash()
		{
			int insertThreads = 4;
			int itemsPerThread = 100;

			_queue = new AsyncBatchQueue<int>( 11 );

			List<Task> insertTasks = Enumerable.Range( 1, insertThreads )
				.Select(
					_ => Task.Run(
						() =>
						{
							for ( int i = 0; i < itemsPerThread; i++ )
								_queue.Add( 42 );
						} ) )
				.ToList();

			await Task.WhenAll( insertTasks );
			_queue.Flush();

			int itemsTaken = 0;
			while ( _queue.Count > 0 )
				itemsTaken += ( await _queue.TakeAsync() ).Count;

			Assert.AreEqual( insertThreads * itemsPerThread, itemsTaken );
		}
Example n. 3
        public async Task InMemoryQueue_PushAndGet(int batchCount)
        {
            var secondary = new InMemoryQueue <List <string> >(10);

            var q = new AsyncBatchQueue <string>(1000,
                                                 new long[] { batchCount },
                                                 new Func <string, long>[] { s => 1 }, secondary);
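            // (assumed reading of the constructor arguments) a 1000-item capacity, one batch limit of
            // batchCount, a matching per-item weight function that counts each item as 1, and a
            // secondary queue that GetNextBatchAsync also drains (see PushSecondaryAsync below)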

            await q.PushSecondaryAsync(Enumerable.Repeat("a", batchCount + 1).ToList());

            var output  = new List <string>();
            var getTask = q.GetNextBatchAsync(output, 10 * 1000).AsTask();

            Assert.True(getTask.Wait(1000));
            Assert.Equal(batchCount, output.Count);

            if (batchCount == 0)
            {
                return;
            }

            // second call should return the last item
            output.Clear();
            await q.GetNextBatchAsync(output, 100);

            Assert.Single(output);
        }
Example n. 4
        public async Task GetBatchWhenLimitIsReached(int batchCount)
        {
            var q = new AsyncBatchQueue <string>(10000,
                                                 new long[] { batchCount },
                                                 new Func <string, long>[] { s => 1 });

            var output = new List <string>();

            for (var i = 0; i < batchCount + 1; i++)
            {
                await q.PushAsync("a");
            }

            var getTask = q.GetNextBatchAsync(output, 10 * 1000).AsTask();

            // the first call should return a full batch
            Assert.True(getTask.Wait(1000));
            Assert.Equal(batchCount, output.Count);

            if (batchCount == 0)
            {
                return;
            }
            // second call should return the last item
            output.Clear();
            await q.GetNextBatchAsync(output, 100);

            Assert.Single(output);
        }
Example n. 5
        public async Task InMemoryQueue_ConcurrentRead(int readerCount, int itemCount)
        {
            var secondary = new InMemoryQueue <List <int> >(1000);

            using var cts       = new CancellationTokenSource();
            using var semaphore = new SemaphoreSlim(0, readerCount);
            var results = new List <int>();
            var q       = new AsyncBatchQueue <int>(10000,
                                                    new long[] { 100 },
                                                    new Func <int, long>[] { s => 1 }, secondary);

            async Task readerTask()
            {
                var output = new List <int>();

                await semaphore.WaitAsync();

                // we're trying to test that the readers will 'eventually' read all the items, so we do several pulls here
                await q.GetNextBatchAsync(output, 500);

                await q.GetNextBatchAsync(output, 500);

                await q.GetNextBatchAsync(output, 500);

                await Task.Delay(100);

                lock (results)
                {
                    results.AddRange(output);
                }
            }

            var readers = new Task[readerCount];

            for (var i = 0; i < readerCount; i++)
            {
                readers[i] = readerTask();
            }

            for (var i = 0; i < itemCount; i++)
            {
                if (i % 2 == 0)
                {
                    await q.PushAsync(i);
                }
                else
                {
                    await q.PushSecondaryAsync(new List <int> { i });
                }
            }
            semaphore.Release(readerCount);
            await Task.WhenAll(readers);

            _output.WriteLine(results.Count.ToString());
            _output.WriteLine(q.EstimateSize().ToString());
            _output.WriteLine(q.EstimateSecondaryQueueSize().ToString());
            Assert.Equal(itemCount, results.Distinct().Count());
        }
Example n. 6
        public async Task InMemoryQueue_PullBothQueues()
        {
            var secondary = new InMemoryQueue <List <int> >(10);

            var q = new AsyncBatchQueue <int>(500,
                                              new long[] { 500 },
                                              new Func <int, long>[] { s => 1 }, secondary);
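            // 500 items enter through the secondary queue and another 500 through the primary queue;
            // the three pulls below are expected to eventually drain both sides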

            await q.PushSecondaryAsync(Enumerable.Range(0, 500).ToList());

            for (var i = 500; i < 1000; i++)
            {
                await q.PushAsync(i);
            }

            // pull 3 times
            var output = new List <int>();
            await q.GetNextBatchAsync(output, 1000);

            await q.GetNextBatchAsync(output, 1000);

            await q.GetNextBatchAsync(output, 1000);

            Assert.Equal(1000, output.Distinct().Count());
        }
		public async Task TimerFlushesPendingItems()
		{
			TimeSpan flushPeriod = TimeSpan.FromMilliseconds( 500 );
			_queue = new AsyncBatchQueue<int>( 9999, flushPeriod );
			_queue.Add( 42 );

			await Task.Delay( flushPeriod + flushPeriod );
			var batch = await _queue.TakeAsync();
			CollectionAssert.AreEqual( new[] { 42 }, batch.ToList() );
		}
        public async Task NoRaceBetweenFlushOnAddAndOnDemand()
        {
            const int attempts  = 100 * 1000;
            const int batchSize = 5;

            _queue = new AsyncBatchQueue <int>(batchSize);

            for (int attemptNumber = 0; attemptNumber < attempts; attemptNumber++)
            {
                AddAllItemsButOne(batchSize);
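                // one slot is left in the current batch, so the Add below (which fills the batch)
                // and the explicit Flush race against each other once the shared trigger is set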

                using (ManualResetEvent trigger = new ManualResetEvent(initialState: false))
                {
                    Task addTask = Task.Run(() =>
                    {
                        trigger.WaitOne();
                        _queue.Add(666);
                    });

                    Task flushTask = Task.Run(() =>
                    {
                        trigger.WaitOne();
                        _queue.Flush();
                    });

                    trigger.Set();
                    await addTask.ConfigureAwait(true);

                    await flushTask.ConfigureAwait(true);

                    IReadOnlyList <int> batch = await _queue.TakeAsync().ConfigureAwait(true);

                    List <int> allItems = batch.ToList();

                    // This happens if Flush occurred before Add, which means there's another item from Add left unflushed.
                    // Gotta flush once more to extract it.
                    if (batch.Count < batchSize)
                    {
                        _queue.Flush();
                        IReadOnlyList <int> secondBatch = await _queue.TakeAsync().ConfigureAwait(true);

                        allItems.AddRange(secondBatch);
                    }

                    allItems.Count.Should().BeLessOrEqualTo(batchSize, $"Double flush detected at attempt #{attemptNumber}. Items: {String.Join( ", ", allItems )}");
                }
            }
        }
        public async Task TimerFlushesPendingItems()
        {
            TimeSpan flushPeriod = TimeSpan.FromMilliseconds(500);
            var      timerQueue  = new AsyncBatchQueue <int>(9999).WithFlushEvery(flushPeriod);

            timerQueue.Add(42);

            await Task.Delay(flushPeriod + flushPeriod).ConfigureAwait(true);

            var batch = await timerQueue.TakeAsync().ConfigureAwait(true);

            batch.Should().Equal(new[] { 42 });
        }
		public async Task ManualFlushWorks()
		{
			int[] array = { 0, 1, 42 };

			_queue = new AsyncBatchQueue<int>( 50 );
			foreach ( var item in array )
				_queue.Add( item );

			_queue.Flush();
			var batch = await _queue.TakeAsync();

			CollectionAssert.AreEqual( array, batch.ToList() );
		}
Example n. 11
        public AWSBufferedSink(string id, string sessionName,
                               IAppDataFileProvider appDataFileProvider,
                               ILogger logger,
                               IMetrics metrics,
                               IBookmarkManager bookmarkManager,
                               NetworkStatus networkStatus,
                               AWSBufferedSinkOptions options)
        {
            Id                        = id;
            _logger                   = logger;
            _metrics                  = metrics;
            _bookmarkManager          = bookmarkManager;
            _networkStatus            = networkStatus;
            _bufferIntervalMs         = options.BufferIntervalMs;
            _format                   = options.Format;
            _maxSecondaryQueueBatches = options.QueueMaxBatches;
            var secondaryQueue = CreateSecondaryQueue(options, sessionName, appDataFileProvider, logger);

            _queue = new AsyncBatchQueue <Envelope <T> >(options.QueueSizeItems,
                                                         new long[] { options.MaxBatchSize, options.MaxBatchBytes },
                                                         new Func <Envelope <T>, long>[] { r => 1, GetRecordSize },
                                                         secondaryQueue);
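            // two batch limits are configured here: a record-count limit (each record weighs 1) and a
            // byte-size limit estimated by GetRecordSize; presumably the batch is emitted as soon as
            // either limit is reached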

            _maxAttempts             = options.MaxAttempts;
            _jittingFactor           = options.JittingFactor;
            _backoffFactor           = options.BackoffFactor;
            _recoveryFactor          = options.RecoveryFactor;
            _minRateAdjustmentFactor = options.MinRateAdjustmentFactor;
            _uploadNetworkPriority   = options.UploadNetworkPriority;

            if (options.TextDecoration is not null)
            {
                _textDecorationEvaluator = new TextDecorationEvaluator(options.TextDecoration, ResolveRecordVariables);
            }

            if (options.TextDecorationEx is not null)
            {
                _textDecorationEvaluator = new TextDecorationExEvaluator(options.TextDecorationEx, EvaluateVariable, ResolveRecordVariable, logger);
            }

            if (options.ObjectDecoration is not null)
            {
                _objectDecorationEvaluator = new ObjectDecorationEvaluator(options.ObjectDecoration, ResolveRecordVariables);
            }

            if (options.ObjectDecorationEx is not null)
            {
                _objectDecorationEvaluator = new ObjectDecorationExEvaluator(options.ObjectDecorationEx, EvaluateVariable, ResolveRecordVariable, logger);
            }
        }
        public async Task ManualFlushWorks()
        {
            int[] array = { 0, 1, 42 };

            _queue = new AsyncBatchQueue <int>(50);
            foreach (var item in array)
            {
                _queue.Add(item);
            }

            _queue.Flush();
            var batch = await _queue.TakeAsync().ConfigureAwait(true);

            batch.Should().Equal(array);
        }
		public async Task FlushesWhenBatchSizeIsReached()
		{
			int[] array = { 0, 1, 42 };
			int index = 0;

			_queue = new AsyncBatchQueue<int>( array.Length );
			for ( ; index < array.Length - 1; index++ )
				_queue.Add( array[ index ] );

			var takeTask = _queue.TakeAsync();
			Assert.IsFalse( takeTask.IsCompleted );

			_queue.Add( array[ index ] );
			var batch = await takeTask;

			CollectionAssert.AreEqual( array, batch.ToList() );
		}
Example n. 14
        public async Task GetBatchWhenTimerExpires(int batchCount, int remaining)
        {
            var q = new AsyncBatchQueue <string>(10000,
                                                 new long[] { batchCount },
                                                 new Func <string, long>[] { s => 1 });

            var output = new List <string>();

            for (var i = 0; i < batchCount - remaining; i++)
            {
                await q.PushAsync("a");
            }

            var getTask = q.GetNextBatchAsync(output, 500).AsTask();

            Assert.True(getTask.Wait(5000));
            Assert.Equal(batchCount - remaining, output.Count);
        }
Example n. 15
        public async Task PersistentQueue_PushAndGet(int batchCount)
        {
            var dataDir  = Path.Combine(AppContext.BaseDirectory, Guid.NewGuid().ToString());
            var queueDir = Path.Combine(dataDir, _queueDirName);

            Directory.CreateDirectory(queueDir);
            var fileProvider = new ProtectedAppDataFileProvider(dataDir);

            try
            {
                var secondary = new FilePersistentQueue <List <int> >(100, queueDir, new IntegerListSerializer(), fileProvider, NullLogger.Instance);

                var q = new AsyncBatchQueue <int>(1000,
                                                  new long[] { batchCount },
                                                  new Func <int, long>[] { s => 1 }, secondary);

                await q.PushSecondaryAsync(Enumerable.Range(0, batchCount + 1).ToList());

                var output  = new List <int>();
                var getTask = q.GetNextBatchAsync(output, 10 * 1000).AsTask();
                Assert.True(getTask.Wait(1000));
                Assert.Equal(batchCount, output.Count);

                if (batchCount == 0)
                {
                    return;
                }

                // second call should return the last item
                output.Clear();
                await q.GetNextBatchAsync(output, 100);

                Assert.Single(output);
            }
            finally
            {
                if (Directory.Exists(dataDir))
                {
                    Directory.Delete(dataDir, true);
                }
            }
        }
        public async Task FlushesWhenBatchSizeIsReached()
        {
            int[] array = { 0, 1, 42 };
            int   index = 0;

            _queue = new AsyncBatchQueue <int>(array.Length);
            for ( ; index < array.Length - 1; index++)
            {
                _queue.Add(array[index]);
            }

            var takeTask = _queue.TakeAsync();

            takeTask.IsCompleted.Should().BeFalse();

            _queue.Add(array[index]);
            var batch = await takeTask.ConfigureAwait(true);

            batch.Should().Equal(array);
        }
Example n. 17
        public async Task PushToFullQueue_Cancellation()
        {
            using var cts = new CancellationTokenSource();
            var output = new List <string>();
            var q      = new AsyncBatchQueue <string>(1000,
                                                      new long[] { 100 },
                                                      new Func <string, long>[] { s => 1 });

            for (var i = 0; i < 1000; i++)
            {
                await q.PushAsync("a");
            }
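            // the 1000-item capacity should now be exhausted, so the next push is expected to block
            // until the token is cancelled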

            var pushTask = q.PushAsync("b", cts.Token).AsTask();

            cts.Cancel();

            await Task.Delay(100);

            Assert.True(pushTask.IsCompleted);
        }
Example n. 18
        public async Task GetBatch_Cancellation()
        {
            using var cts = new CancellationTokenSource();
            var output = new List <string>();
            var q      = new AsyncBatchQueue <string>(10000,
                                                      new long[] { 100 },
                                                      new Func <string, long>[] { s => 1 });

            for (var i = 0; i < 99; i++)
            {
                await q.PushAsync("a");
            }
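            // only 99 items were pushed, below the 100-item batch limit, so this call presumably waits
            // until the token below is cancelled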

            var getTask = q.GetNextBatchAsync(output, int.MaxValue, cts.Token).AsTask();

            cts.Cancel();

            await Task.Delay(200);

            Assert.True(getTask.IsCompleted);
        }
Example n. 19
        public async Task PersistentQueue_PullBothQueues()
        {
            var dataDir  = Path.Combine(AppContext.BaseDirectory, Guid.NewGuid().ToString());
            var queueDir = Path.Combine(dataDir, _queueDirName);

            Directory.CreateDirectory(queueDir);
            var fileProvider = new ProtectedAppDataFileProvider(dataDir);

            try
            {
                var secondary = new FilePersistentQueue <List <int> >(10, queueDir, new IntegerListSerializer(), fileProvider, NullLogger.Instance);
                var q         = new AsyncBatchQueue <int>(500,
                                                          new long[] { 500 },
                                                          new Func <int, long>[] { s => 1 }, secondary);

                await q.PushSecondaryAsync(Enumerable.Range(0, 500).ToList());

                for (var i = 500; i < 1000; i++)
                {
                    await q.PushAsync(i);
                }

                // pull 3 times
                var output = new List <int>();
                await q.GetNextBatchAsync(output, 1000);

                await q.GetNextBatchAsync(output, 1000);

                await q.GetNextBatchAsync(output, 1000);

                Assert.Equal(1000, output.Distinct().Count());
            }
            finally
            {
                if (Directory.Exists(dataDir))
                {
                    Directory.Delete(dataDir, true);
                }
            }
        }
Example n. 20
        public async Task LimitIsReached_OrderIsPresevered()
        {
            var q = new AsyncBatchQueue <int>(10000,
                                              new long[] { 100 },
                                              new Func <int, long>[] { s => 1 });

            for (var i = 0; i < 150 + 1; i++)
            {
                await q.PushAsync(i);
            }

            var output = new List <int>();
            await q.GetNextBatchAsync(output, 1000);

            // pull again
            await q.GetNextBatchAsync(output, 100);

            for (var i = 0; i < 150; i++)
            {
                Assert.Equal(i, output[i]);
            }
        }
Example n. 21
        public async Task PersistentQueue_ConcurrentRead(int readerCount, int itemCount)
        {
            var dataDir  = Path.Combine(AppContext.BaseDirectory, Guid.NewGuid().ToString());
            var queueDir = Path.Combine(dataDir, _queueDirName);

            Directory.CreateDirectory(queueDir);
            var fileProvider = new ProtectedAppDataFileProvider(dataDir);

            try
            {
                var secondary = new FilePersistentQueue <List <int> >(100000, queueDir, new IntegerListSerializer(), fileProvider, NullLogger.Instance);
                using var cts       = new CancellationTokenSource();
                using var semaphore = new SemaphoreSlim(0, readerCount);
                var results = new List <int>();
                var q       = new AsyncBatchQueue <int>(10000,
                                                        new long[] { 100 },
                                                        new Func <int, long>[] { s => 1 }, secondary);

                async Task readerTask()
                {
                    var output = new List <int>();

                    await semaphore.WaitAsync();

                    // we're trying to test that the readers will 'eventually' read all the items, so we do several pulls here
                    await q.GetNextBatchAsync(output, 500);

                    await q.GetNextBatchAsync(output, 500);

                    await q.GetNextBatchAsync(output, 500);

                    await Task.Delay(100);

                    lock (results)
                    {
                        results.AddRange(output);
                    }
                }

                var readers = new Task[readerCount];
                for (var i = 0; i < readerCount; i++)
                {
                    readers[i] = readerTask();
                }

                for (var i = 0; i < itemCount; i++)
                {
                    if (i % 2 == 0)
                    {
                        await q.PushAsync(i);
                    }
                    else
                    {
                        await q.PushSecondaryAsync(new List <int> { i });
                    }
                }

                semaphore.Release(readerCount);
                await Task.WhenAll(readers);

                _output.WriteLine(results.Count.ToString());
                _output.WriteLine(q.EstimateSize().ToString());
                _output.WriteLine(q.EstimateSecondaryQueueSize().ToString());
                Assert.Equal(itemCount, results.Distinct().Count());
            }
            finally
            {
                if (Directory.Exists(dataDir))
                {
                    Directory.Delete(dataDir, true);
                }
            }
        }
        public void ThrowsOnIncorrectBatchSize()
        {
            Action act = () => _queue = new AsyncBatchQueue <int>(0);

            act.ShouldThrow <ArgumentOutOfRangeException>();
        }
		public void ThrowsOnIncorrectBatchSize()
		{
			_queue = new AsyncBatchQueue<int>( 0 );
		}