public async Task GetBatchWhenLimitIsReached(int batchCount)
{
    // Capacity (10000) is large enough to never interfere; the only limit in play is 'batchCount'.
    var q = new AsyncBatchQueue<string>(10000, new long[] { batchCount }, new Func<string, long>[] { s => 1 });
    var output = new List<string>();

    // Push one item more than a full batch so a partial batch is left behind.
    for (var i = 0; i < batchCount + 1; i++)
    {
        await q.PushAsync("a");
    }

    var getTask = q.GetNextBatchAsync(output, 10 * 1000).AsTask();

    // The first call should return a full batch well before its 10s timeout.
    // Await with a 1s watchdog instead of blocking the test thread with Task.Wait()
    // (sync-over-async risks thread-pool starvation in async tests).
    var finished = await Task.WhenAny(getTask, Task.Delay(1000)) == getTask;
    Assert.True(finished);
    Assert.Equal(batchCount, output.Count);

    if (batchCount == 0)
    {
        return;
    }

    // The second call should return the single leftover item.
    output.Clear();
    await q.GetNextBatchAsync(output, 100);
    Assert.Single(output);
}
public async Task InMemoryQueue_ConcurrentRead(int readerCount, int itemCount)
{
    // NOTE: the original also created an unused CancellationTokenSource; removed as dead code.
    var secondary = new InMemoryQueue<List<int>>(1000);
    using var semaphore = new SemaphoreSlim(0, readerCount);
    var results = new List<int>();
    var q = new AsyncBatchQueue<int>(10000, new long[] { 100 }, new Func<int, long>[] { s => 1 }, secondary);

    // Each reader blocks on the semaphore, then pulls several times so that all
    // items are 'eventually' read even when a single pull returns a partial batch.
    async Task ReaderTask()
    {
        var output = new List<int>();
        await semaphore.WaitAsync();
        await q.GetNextBatchAsync(output, 500);
        await q.GetNextBatchAsync(output, 500);
        await q.GetNextBatchAsync(output, 500);
        await Task.Delay(100);
        lock (results)
        {
            results.AddRange(output);
        }
    }

    var readers = new Task[readerCount];
    for (var i = 0; i < readerCount; i++)
    {
        readers[i] = ReaderTask();
    }

    // Interleave pushes between the primary and the secondary queue.
    for (var i = 0; i < itemCount; i++)
    {
        if (i % 2 == 0)
        {
            await q.PushAsync(i);
        }
        else
        {
            await q.PushSecondaryAsync(new List<int> { i });
        }
    }

    // Release all readers at once to maximize contention.
    semaphore.Release(readerCount);
    await Task.WhenAll(readers);

    _output.WriteLine(results.Count.ToString());
    _output.WriteLine(q.EstimateSize().ToString());
    _output.WriteLine(q.EstimateSecondaryQueueSize().ToString());

    // Every pushed item must have been read exactly once across all readers.
    Assert.Equal(itemCount, results.Distinct().Count());
}
public async Task InMemoryQueue_PullBothQueues()
{
    // 500 items go to the secondary queue up-front and another 500 to the primary queue.
    var secondary = new InMemoryQueue<List<int>>(10);
    var q = new AsyncBatchQueue<int>(500, new long[] { 500 }, new Func<int, long>[] { s => 1 }, secondary);

    await q.PushSecondaryAsync(Enumerable.Range(0, 500).ToList());
    for (var item = 500; item < 1000; item++)
    {
        await q.PushAsync(item);
    }

    // Three pulls should drain both queues completely.
    var output = new List<int>();
    for (var pull = 0; pull < 3; pull++)
    {
        await q.GetNextBatchAsync(output, 1000);
    }

    Assert.Equal(1000, output.Distinct().Count());
}
public async Task PushToFullQueue_Cancellation()
{
    // NOTE: the original allocated an unused List<string> 'output'; removed as dead code.
    using var cts = new CancellationTokenSource();
    var q = new AsyncBatchQueue<string>(1000, new long[] { 100 }, new Func<string, long>[] { s => 1 });

    // Fill the queue to its hard capacity so the next push cannot complete synchronously.
    for (var i = 0; i < 1000; i++)
    {
        await q.PushAsync("a");
    }

    // The blocked push should be released once the token is canceled.
    var pushTask = q.PushAsync("b", cts.Token).AsTask();
    cts.Cancel();
    await Task.Delay(100);

    // NOTE(review): IsCompleted also covers success/fault — confirm whether
    // IsCanceled would be the stricter, intended assertion here.
    Assert.True(pushTask.IsCompleted);
}
public async Task GetBatchWhenTimerExpires(int batchCount, int remaining)
{
    var q = new AsyncBatchQueue<string>(10000, new long[] { batchCount }, new Func<string, long>[] { s => 1 });
    var output = new List<string>();

    // Push fewer items than a full batch so the 500ms pull timer must expire
    // before GetNextBatchAsync returns the partial batch.
    for (var i = 0; i < batchCount - remaining; i++)
    {
        await q.PushAsync("a");
    }

    var getTask = q.GetNextBatchAsync(output, 500).AsTask();

    // Await with a generous 5s watchdog instead of blocking the test thread with
    // Task.Wait() (sync-over-async risks thread-pool starvation in async tests).
    var finished = await Task.WhenAny(getTask, Task.Delay(5000)) == getTask;
    Assert.True(finished);
    Assert.Equal(batchCount - remaining, output.Count);
}
public async Task GetBatch_Cancellation()
{
    using var cts = new CancellationTokenSource();
    var output = new List<string>();
    var q = new AsyncBatchQueue<string>(10000, new long[] { 100 }, new Func<string, long>[] { s => 1 });

    // Push one item fewer than the batch limit (100) so the pull has to wait.
    for (var i = 0; i < 99; i++)
    {
        await q.PushAsync("a");
    }

    // With an effectively infinite timeout, only cancellation can complete the call.
    var getTask = q.GetNextBatchAsync(output, int.MaxValue, cts.Token).AsTask();
    cts.Cancel();
    await Task.Delay(200);

    Assert.True(getTask.IsCompleted);
}
/// <inheritdoc/>
/// <remarks>
/// Converts the incoming envelope to a record and pushes it onto the batch queue,
/// blocking the caller only when the push cannot complete synchronously.
/// Cancellation via <c>_stopToken</c> is treated as normal shutdown (the event is
/// dropped silently); any other failure is logged and swallowed so the observer
/// keeps receiving events.
/// </remarks>
public void OnNext(IEnvelope value)
{
    try
    {
        var record = CreateRecord(value);
        var valTask = _queue.PushAsync(new Envelope<T>(record, value.Timestamp, value.BookmarkData, value.Position), _stopToken);
        if (!valTask.IsCompleted)
        {
            // AsTask() allocates memory so we only call it when the push could not
            // complete synchronously; the synchronous wait is required because
            // IObserver<T>.OnNext is a void, non-async contract.
            valTask.AsTask().GetAwaiter().GetResult();
        }
    }
    catch (OperationCanceledException)
    {
        // _stopToken fired: shutdown in progress, drop the event without logging.
        return;
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "Error processing events");
    }
}
public async Task PersistentQueue_PullBothQueues()
{
    // Work in a throwaway directory so parallel test runs never collide.
    var dataDir = Path.Combine(AppContext.BaseDirectory, Guid.NewGuid().ToString());
    var queueDir = Path.Combine(dataDir, _queueDirName);
    Directory.CreateDirectory(queueDir);
    var fileProvider = new ProtectedAppDataFileProvider(dataDir);
    try
    {
        // 500 items land in the file-backed secondary queue and 500 in the primary queue.
        var secondary = new FilePersistentQueue<List<int>>(10, queueDir, new IntegerListSerializer(), fileProvider, NullLogger.Instance);
        var q = new AsyncBatchQueue<int>(500, new long[] { 500 }, new Func<int, long>[] { s => 1 }, secondary);

        await q.PushSecondaryAsync(Enumerable.Range(0, 500).ToList());
        for (var item = 500; item < 1000; item++)
        {
            await q.PushAsync(item);
        }

        // Three pulls should drain both queues completely.
        var output = new List<int>();
        for (var pull = 0; pull < 3; pull++)
        {
            await q.GetNextBatchAsync(output, 1000);
        }

        Assert.Equal(1000, output.Distinct().Count());
    }
    finally
    {
        // Always clean up the on-disk queue data.
        if (Directory.Exists(dataDir))
        {
            Directory.Delete(dataDir, true);
        }
    }
}
public async Task LimitIsReached_OrderIsPresevered()
{
    var q = new AsyncBatchQueue<int>(10000, new long[] { 100 }, new Func<int, long>[] { s => 1 });

    // Push more items (151) than one batch (limit 100) can hold.
    const int total = 150;
    for (var i = 0; i <= total; i++)
    {
        await q.PushAsync(i);
    }

    // Two pulls: the first returns a full batch, the second the remainder.
    var output = new List<int>();
    await q.GetNextBatchAsync(output, 1000);
    await q.GetNextBatchAsync(output, 100);

    // Items must come back in FIFO order.
    for (var i = 0; i < total; i++)
    {
        Assert.Equal(i, output[i]);
    }
}
public async Task PersistentQueue_ConcurrentRead(int readerCount, int itemCount)
{
    // Work in a throwaway directory so parallel test runs never collide.
    var dataDir = Path.Combine(AppContext.BaseDirectory, Guid.NewGuid().ToString());
    var queueDir = Path.Combine(dataDir, _queueDirName);
    Directory.CreateDirectory(queueDir);
    var fileProvider = new ProtectedAppDataFileProvider(dataDir);
    try
    {
        var secondary = new FilePersistentQueue<List<int>>(100000, queueDir, new IntegerListSerializer(), fileProvider, NullLogger.Instance);
        // NOTE: the original also created an unused CancellationTokenSource; removed as dead code.
        using var semaphore = new SemaphoreSlim(0, readerCount);
        var results = new List<int>();
        var q = new AsyncBatchQueue<int>(10000, new long[] { 100 }, new Func<int, long>[] { s => 1 }, secondary);

        // Each reader blocks on the semaphore, then pulls several times so that all
        // items are 'eventually' read even when a single pull returns a partial batch.
        async Task ReaderTask()
        {
            var output = new List<int>();
            await semaphore.WaitAsync();
            await q.GetNextBatchAsync(output, 500);
            await q.GetNextBatchAsync(output, 500);
            await q.GetNextBatchAsync(output, 500);
            await Task.Delay(100);
            lock (results)
            {
                results.AddRange(output);
            }
        }

        var readers = new Task[readerCount];
        for (var i = 0; i < readerCount; i++)
        {
            readers[i] = ReaderTask();
        }

        // Interleave pushes between the primary and the file-backed secondary queue.
        for (var i = 0; i < itemCount; i++)
        {
            if (i % 2 == 0)
            {
                await q.PushAsync(i);
            }
            else
            {
                await q.PushSecondaryAsync(new List<int> { i });
            }
        }

        // Release all readers at once to maximize contention.
        semaphore.Release(readerCount);
        await Task.WhenAll(readers);

        _output.WriteLine(results.Count.ToString());
        _output.WriteLine(q.EstimateSize().ToString());
        _output.WriteLine(q.EstimateSecondaryQueueSize().ToString());

        // Every pushed item must have been read exactly once across all readers.
        Assert.Equal(itemCount, results.Distinct().Count());
    }
    finally
    {
        // Always clean up the on-disk queue data.
        if (Directory.Exists(dataDir))
        {
            Directory.Delete(dataDir, true);
        }
    }
}