/// <summary>
/// Splits <paramref name="users"/> into graph-request-sized chunks, queues them,
/// and fans out <c>ConcurrentRequests</c> workers to drain the queue in parallel.
/// </summary>
/// <param name="users">Users to send; nothing happens when the sequence is empty.</param>
/// <param name="makeRequest">Builds the HTTP request for one chunk of users.</param>
/// <param name="requestMax">Maximum users per graph request (20 for add, 1 for remove).</param>
/// <param name="batchSize">How many chunks each worker sends per batch.</param>
/// <remarks>NOTE(review): a tuple-returning overload with the same parameters exists later in
/// this file — these look like two generations of the same method; confirm which is live.</remarks>
private async Task BatchAndSend(IEnumerable<AzureADUser> users, MakeBulkRequest makeRequest, int requestMax, int batchSize)
{
    if (!users.Any())
    {
        return;
    }

    // Chop the users into chunks of at most requestMax each, tagging every chunk
    // with a sequential id ("0-", "1-", ...) used to correlate batch responses.
    var chunks = ChunksOfSize(users, requestMax)
        .Select((chunk, index) => new ChunkOfUsers { ToSend = chunk, Id = $"{index}-" });
    var queuedBatches = new ConcurrentQueue<ChunkOfUsers>(chunks);

    var workers = Enumerable.Range(0, ConcurrentRequests)
        .Select(workerNumber => ProcessQueue(queuedBatches, makeRequest, workerNumber, batchSize));
    await Task.WhenAll(workers);
}
/// <summary>
/// Worker loop: drains chunks off the shared queue, sending them in groups of
/// <paramref name="batchSize"/>, and keeps going until the queue stays empty
/// (a failed batch may requeue chunks for retry).
/// </summary>
private async Task ProcessQueue(ConcurrentQueue<ChunkOfUsers> queue, MakeBulkRequest makeRequest, int threadNumber, int batchSize)
{
    do
    {
        var pending = new List<ChunkOfUsers>();
        while (queue.TryDequeue(out var chunk))
        {
            pending.Add(chunk);
            if (pending.Count == batchSize)
            {
                await ProcessBatch(queue, pending, makeRequest, threadNumber);
                pending.Clear();
            }
        }

        // Flush whatever is left over after the queue ran dry.
        if (pending.Any())
        {
            await ProcessBatch(queue, pending, makeRequest, threadNumber);
        }
    } while (!queue.IsEmpty); // the last ProcessBatch may have requeued retries
}
/// <summary>
/// Sends one batch of chunks via <c>SendBatch</c>; any chunk whose id comes back as
/// needing a retry is requeued (until its retry budget runs out).
/// </summary>
private async Task ProcessBatch(ConcurrentQueue<ChunkOfUsers> queue, List<ChunkOfUsers> toSend, MakeBulkRequest makeRequest, int threadNumber)
{
    await _log.LogMessageAsync(new LogMessage { Message = $"Thread number {threadNumber}: Sending a batch of {toSend.Count} requests.", RunId = RunId });

    int requeued = 0;
    try
    {
        var steps = toSend
            .Select(chunk => new BatchRequestStep(chunk.Id, makeRequest(chunk.ToSend)))
            .ToArray();

        await foreach (var idToRetry in await SendBatch(new BatchRequestContent(steps)))
        {
            requeued++;
            var failedChunk = toSend.First(chunk => chunk.Id == idToRetry);
            if (failedChunk.ShouldRetry)
            {
                queue.Enqueue(failedChunk.UpdateIdForRetry(threadNumber));
            }
        }

        await _log.LogMessageAsync(new LogMessage { Message = $"{threadNumber}: {toSend.Count - requeued} out of {toSend.Count} requests succeeded. {queue.Count} left.", RunId = RunId });
    }
    catch (ServiceException ex)
    {
        // Winding up here is rare — usually a timeout or something else weird.
        // Best response is to requeue the chunks; a chunk that has already been
        // retried too many times is dropped on the floor so we don't loop forever.
        // In the future, log which ones get dropped.
        await _log.LogMessageAsync(new LogMessage { Message = ex.GetBaseException().ToString(), RunId = RunId });
        foreach (var chunk in toSend)
        {
            if (chunk.ShouldRetry)
            {
                queue.Enqueue(chunk.UpdateIdForRetry(threadNumber));
            }
        }
    }
}
/// <summary>
/// Sends one batch of chunks via <c>SendBatch</c> and returns an overall status plus the
/// number of individual user requests that succeeded. Chunks that fail retryably are
/// requeued; a not-found user inside a multi-user chunk is split into its own
/// single-user chunk so the rest of the chunk can proceed.
/// </summary>
/// <returns>
/// <c>ResponseCode.Error</c> if any response was unrecoverable (processing stops at the
/// first such response); otherwise <c>ResponseCode.Ok</c>, with the count of users whose
/// requests did not come back marked for retry.
/// </returns>
private async Task<(ResponseCode ResponseCode, int SuccessCount)> ProcessBatch(ConcurrentQueue<ChunkOfUsers> queue, List<ChunkOfUsers> toSend, MakeBulkRequest makeRequest, int threadNumber)
{
    await _log.LogMessageAsync(new LogMessage { Message = $"Thread number {threadNumber}: Sending a batch of {toSend.Count} requests.", RunId = RunId });
    int requeued = 0;
    bool hasUnrecoverableErrors = false;
    // Start from the total user count and subtract each chunk that comes back failed.
    // (Was SelectMany(...).ToList().Count — Sum avoids the throwaway list allocation.)
    var successfulRequests = toSend.Sum(x => x.ToSend.Count);
    try
    {
        await foreach (var idToRetry in await SendBatch(new BatchRequestContent(toSend.Select(x => new BatchRequestStep(x.Id, makeRequest(x.ToSend))).ToArray())))
        {
            var chunkToRetry = toSend.First(x => x.Id == idToRetry.RequestId);
            successfulRequests -= chunkToRetry.ToSend.Count;
            if (idToRetry.ResponseCode == ResponseCode.Error)
            {
                // Unrecoverable: stop processing the remaining responses; the caller aborts.
                hasUnrecoverableErrors = true;
                break;
            }
            if (chunkToRetry.ShouldRetry)
            {
                // If the response names a specific user that wasn't found, peel that user
                // out into their own single-user chunk so they can't block the others.
                if (chunkToRetry.ToSend.Count > 1 && !string.IsNullOrWhiteSpace(idToRetry.AzureObjectId))
                {
                    // Ordinal comparison: these are GUID strings, not linguistic text (CA1309).
                    var notFoundUser = chunkToRetry.ToSend.FirstOrDefault(x => x.ObjectId.ToString().Equals(idToRetry.AzureObjectId, StringComparison.OrdinalIgnoreCase));
                    if (notFoundUser != null)
                    {
                        chunkToRetry.ToSend.Remove(notFoundUser);
                        var notFoundChunk = new ChunkOfUsers
                        {
                            Id = GetNewChunkId(),
                            ToSend = new List<AzureADUser> { notFoundUser }
                        };
                        requeued++;
                        queue.Enqueue(notFoundChunk.UpdateIdForRetry(threadNumber));
                        await _log.LogMessageAsync(new LogMessage { Message = $"Queued {notFoundChunk.Id} from {chunkToRetry.Id}", RunId = RunId });
                    }
                }
                requeued++;
                var originalId = chunkToRetry.Id;
                queue.Enqueue(chunkToRetry.UpdateIdForRetry(threadNumber));
                await _log.LogMessageAsync(new LogMessage { Message = $"Requeued {originalId} as {chunkToRetry.Id}", RunId = RunId });
            }
        }
        // NOTE(review): requeued can exceed toSend.Count when a not-found user is split out
        // (two increments for one chunk), so this figure can go negative — confirm intent.
        await _log.LogMessageAsync(new LogMessage { Message = $"Thread number {threadNumber}: {toSend.Count - requeued} out of {toSend.Count} requests succeeded. {queue.Count} left.", RunId = RunId });
    }
    catch (ServiceException ex)
    {
        // Winding up in here is a pretty rare event.
        // Usually, it's because either a timeout happened or something else weird went on;
        // the best thing to do is just requeue the chunks, but if a chunk has already been
        // queued five times or so, drop it on the floor so we don't go forever.
        // In the future, log the exception and which ones get dropped.
        // NOTE(review): successfulRequests is not reduced here, so the whole batch is
        // counted as succeeded even though it threw — confirm whether that is intentional.
        await _log.LogMessageAsync(new LogMessage { Message = ex.GetBaseException().ToString(), RunId = RunId });
        foreach (var chunk in toSend)
        {
            if (chunk.ShouldRetry)
            {
                var originalId = chunk.Id;
                queue.Enqueue(chunk.UpdateIdForRetry(threadNumber));
                await _log.LogMessageAsync(new LogMessage { Message = $"Requeued {originalId} as {chunk.Id}", RunId = RunId });
            }
        }
    }
    var status = hasUnrecoverableErrors ? ResponseCode.Error : ResponseCode.Ok;
    return (status, successfulRequests);
}
/// <summary>
/// Worker loop: drains chunks off the shared queue in groups of <paramref name="batchSize"/>,
/// accumulating the success count. Bails out immediately with the failing result when a
/// batch reports an unrecoverable error; otherwise loops until the queue stays empty.
/// </summary>
private async Task<(ResponseCode ResponseCode, int SuccessCount)> ProcessQueue(ConcurrentQueue<ChunkOfUsers> queue, MakeBulkRequest makeRequest, int threadNumber, int batchSize)
{
    var successCount = 0;
    do
    {
        var pending = new List<ChunkOfUsers>();
        while (queue.TryDequeue(out var chunk))
        {
            pending.Add(chunk);
            if (pending.Count != batchSize)
            {
                continue;
            }

            var batchResult = await ProcessBatch(queue, pending, makeRequest, threadNumber);
            pending.Clear();
            successCount += batchResult.SuccessCount;
            if (batchResult.ResponseCode == ResponseCode.Error)
            {
                return batchResult;
            }
        }

        // Flush the partial batch left over after the queue ran dry.
        if (pending.Count > 0)
        {
            var batchResult = await ProcessBatch(queue, pending, makeRequest, threadNumber);
            successCount += batchResult.SuccessCount;
            if (batchResult.ResponseCode == ResponseCode.Error)
            {
                return batchResult;
            }
        }
    } while (!queue.IsEmpty); // the last ProcessBatch may have requeued retries

    return (ResponseCode.Ok, successCount);
}
/// <summary>
/// Splits <paramref name="users"/> into graph-request-sized chunks, queues them, and fans
/// out <c>ConcurrentRequests</c> workers to drain the queue in parallel.
/// </summary>
/// <param name="users">Users to send; an empty sequence short-circuits to (Ok, 0).</param>
/// <param name="makeRequest">Builds the HTTP request for one chunk of users.</param>
/// <param name="requestMax">Maximum users per graph request (20 for add, 1 for remove).</param>
/// <param name="batchSize">How many chunks each worker sends per batch.</param>
/// <returns>
/// Error if any worker hit an unrecoverable error, otherwise Ok, plus the total
/// number of successful user requests across all workers.
/// </returns>
private async Task<(ResponseCode ResponseCode, int SuccessCount)> BatchAndSend(IEnumerable<AzureADUser> users, MakeBulkRequest makeRequest, int requestMax, int batchSize)
{
    // Materialize once: `users` may be a lazy sequence, and it was previously enumerated
    // twice (once for Any(), once for ChunksOfSize).
    var allUsers = users as IReadOnlyCollection<AzureADUser> ?? users.ToList();
    if (allUsers.Count == 0)
    {
        return (ResponseCode.Ok, 0);
    }

    // Chop up the users into chunks of how many per graph request (20 for add, 1 for remove).
    var queuedBatches = new ConcurrentQueue<ChunkOfUsers>(
        ChunksOfSize(allUsers, requestMax)
            .Select(chunk => new ChunkOfUsers { ToSend = chunk, Id = GetNewChunkId() }));

    var responses = await Task.WhenAll(
        Enumerable.Range(0, ConcurrentRequests)
            .Select(threadNumber => ProcessQueue(queuedBatches, makeRequest, threadNumber, batchSize)));

    var status = responses.Any(x => x.ResponseCode == ResponseCode.Error) ? ResponseCode.Error : ResponseCode.Ok;
    return (status, responses.Sum(x => x.SuccessCount));
}