/// <summary>
/// Publishes this sink's metrics: first the incremental counters accumulated since
/// the last publish, then the current-value gauges (latency and queue occupancy),
/// and finally resets the incremental counters for the next interval.
/// </summary>
/// <param name="prefix">Prefix prepended to every metric name before publishing.</param>
protected void PublishMetrics(string prefix)
{
    // Incremental counters: deltas since the previous publish.
    // NOTE(review): _bytesAttempted is published under the BYTES_ACCEPTED key —
    // confirm this attempted/accepted pairing is intentional.
    _metrics?.PublishCounters(Id, MetricsConstants.CATEGORY_SINK, CounterTypeEnum.Increment, new Dictionary<string, MetricValue>()
    {
        { prefix + MetricsConstants.BYTES_ACCEPTED, new MetricValue(_bytesAttempted, MetricUnit.Bytes) },
        { prefix + MetricsConstants.RECORDS_ATTEMPTED, new MetricValue(_recordsAttempted) },
        { prefix + MetricsConstants.RECORDS_FAILED_NONRECOVERABLE, new MetricValue(_recordsFailedNonrecoverable) },
        { prefix + MetricsConstants.RECORDS_FAILED_RECOVERABLE, new MetricValue(_recordsFailedRecoverable) },
        { prefix + MetricsConstants.RECORDS_SUCCESS, new MetricValue(_recordsSuccess) },
        { prefix + MetricsConstants.RECOVERABLE_SERVICE_ERRORS, new MetricValue(_recoverableServiceErrors) },
        { prefix + MetricsConstants.NONRECOVERABLE_SERVICE_ERRORS, new MetricValue(_nonrecoverableServiceErrors) }
    });

    // Take one snapshot of each queue size so the published sizes and the
    // derived "full" flags agree on the same moment in time.
    var currentBufSize = _queue.EstimateSize();
    var currentSecondaryQueueSize = _queue.EstimateSecondaryQueueSize();
    var bufferFull = _queue.Capacity <= currentBufSize;
    // Fix: previously this re-queried _queue.EstimateSecondaryQueueSize(), which could
    // return a different value than the snapshot published below; reuse the snapshot.
    var secondaryQueueFull = _maxSecondaryQueueBatches <= currentSecondaryQueueSize;

    // Current-value gauges: point-in-time readings, not deltas.
    _metrics?.PublishCounters(Id, MetricsConstants.CATEGORY_SINK, CounterTypeEnum.CurrentValue, new Dictionary<string, MetricValue>()
    {
        { prefix + MetricsConstants.LATENCY, new MetricValue(_latency, MetricUnit.Milliseconds) },
        { prefix + MetricsConstants.CLIENT_LATENCY, new MetricValue(_clientLatency, MetricUnit.Milliseconds) },
        { prefix + MetricsConstants.BATCHES_IN_MEMORY_BUFFER, new MetricValue(currentBufSize, MetricUnit.Count) },
        { prefix + MetricsConstants.BATCHES_IN_PERSISTENT_QUEUE, new MetricValue(currentSecondaryQueueSize, MetricUnit.Count) },
        { prefix + MetricsConstants.IN_MEMORY_BUFFER_FULL, new MetricValue(bufferFull ? 1 : 0, MetricUnit.Count) },
        { prefix + MetricsConstants.PERSISTENT_QUEUE_FULL, new MetricValue(secondaryQueueFull ? 1 : 0, MetricUnit.Count) }
    });

    // Incremental counters have been reported; start the next interval from zero.
    ResetIncrementalCounters();
}
/// <summary>
/// Verifies that multiple concurrent readers eventually drain every item pushed into
/// an <c>AsyncBatchQueue</c> backed by an in-memory secondary queue: items are pushed
/// alternately to the primary and secondary queues, all readers are released at once,
/// and the union of everything they read must equal the full item set.
/// </summary>
/// <param name="readerCount">Number of concurrent reader tasks.</param>
/// <param name="itemCount">Total number of items pushed across both queues.</param>
public async Task InMemoryQueue_ConcurrentRead(int readerCount, int itemCount)
{
    var secondary = new InMemoryQueue<List<int>>(1000);
    using var semaphore = new SemaphoreSlim(0, readerCount);
    var results = new List<int>();
    var q = new AsyncBatchQueue<int>(10000, new long[] { 100 }, new Func<int, long>[] { s => 1 }, secondary);

    // Each reader blocks until all pushes complete, then pulls several batches.
    async Task ReaderTask()
    {
        var output = new List<int>();
        await semaphore.WaitAsync();
        // We're trying to test that the readers will 'eventually' read all the items,
        // so we do several pulls here.
        await q.GetNextBatchAsync(output, 500);
        await q.GetNextBatchAsync(output, 500);
        await q.GetNextBatchAsync(output, 500);
        await Task.Delay(100);
        lock (results)
        {
            results.AddRange(output);
        }
    }

    var readers = new Task[readerCount];
    for (var i = 0; i < readerCount; i++)
    {
        readers[i] = ReaderTask();
    }

    // Interleave pushes between the primary queue and the secondary queue.
    for (var i = 0; i < itemCount; i++)
    {
        if (i % 2 == 0)
        {
            await q.PushAsync(i);
        }
        else
        {
            await q.PushSecondaryAsync(new List<int> { i });
        }
    }

    // Release every reader simultaneously so they contend for the queue.
    semaphore.Release(readerCount);
    await Task.WhenAll(readers);

    _output.WriteLine(results.Count.ToString());
    _output.WriteLine(q.EstimateSize().ToString());
    _output.WriteLine(q.EstimateSecondaryQueueSize().ToString());

    // Distinct() guards against a reader double-counting an item; every pushed
    // item must have been observed exactly once across all readers.
    Assert.Equal(itemCount, results.Distinct().Count());
}
/// <summary>
/// Same concurrency scenario as the in-memory variant, but the secondary queue is a
/// <c>FilePersistentQueue</c> rooted in a unique temp directory: multiple readers must
/// eventually drain all items pushed alternately to the primary and persistent queues.
/// The temp directory is always removed, even on assertion failure.
/// </summary>
/// <param name="readerCount">Number of concurrent reader tasks.</param>
/// <param name="itemCount">Total number of items pushed across both queues.</param>
public async Task PersistentQueue_ConcurrentRead(int readerCount, int itemCount)
{
    // Unique per-run directory so parallel test runs cannot collide on disk.
    var dataDir = Path.Combine(AppContext.BaseDirectory, Guid.NewGuid().ToString());
    var queueDir = Path.Combine(dataDir, _queueDirName);
    Directory.CreateDirectory(queueDir);
    var fileProvider = new ProtectedAppDataFileProvider(dataDir);
    try
    {
        var secondary = new FilePersistentQueue<List<int>>(100000, queueDir, new IntegerListSerializer(), fileProvider, NullLogger.Instance);
        using var semaphore = new SemaphoreSlim(0, readerCount);
        var results = new List<int>();
        var q = new AsyncBatchQueue<int>(10000, new long[] { 100 }, new Func<int, long>[] { s => 1 }, secondary);

        // Each reader blocks until all pushes complete, then pulls several batches.
        async Task ReaderTask()
        {
            var output = new List<int>();
            await semaphore.WaitAsync();
            // We're trying to test that the readers will 'eventually' read all the items,
            // so we do several pulls here.
            await q.GetNextBatchAsync(output, 500);
            await q.GetNextBatchAsync(output, 500);
            await q.GetNextBatchAsync(output, 500);
            await Task.Delay(100);
            lock (results)
            {
                results.AddRange(output);
            }
        }

        var readers = new Task[readerCount];
        for (var i = 0; i < readerCount; i++)
        {
            readers[i] = ReaderTask();
        }

        // Interleave pushes between the primary queue and the persistent secondary queue.
        for (var i = 0; i < itemCount; i++)
        {
            if (i % 2 == 0)
            {
                await q.PushAsync(i);
            }
            else
            {
                await q.PushSecondaryAsync(new List<int> { i });
            }
        }

        // Release every reader simultaneously so they contend for the queue.
        semaphore.Release(readerCount);
        await Task.WhenAll(readers);

        _output.WriteLine(results.Count.ToString());
        _output.WriteLine(q.EstimateSize().ToString());
        _output.WriteLine(q.EstimateSecondaryQueueSize().ToString());

        // Distinct() guards against a reader double-counting an item; every pushed
        // item must have been observed exactly once across all readers.
        Assert.Equal(itemCount, results.Distinct().Count());
    }
    finally
    {
        // Clean up the on-disk queue regardless of test outcome.
        if (Directory.Exists(dataDir))
        {
            Directory.Delete(dataDir, true);
        }
    }
}