/// <summary>
/// Creates and persists a new <see cref="RetryBatch"/> document in MarkingDocuments status,
/// pre-populated with the supplied failed-message retry ids, and returns the new document id.
/// </summary>
public async Task<string> CreateBatchDocument(string requestId, RetryType retryType, string[] failedMessageRetryIds, string originator, DateTime startTime, DateTime? last = null, string batchName = null, string classifier = null)
{
    var batchDocumentId = RetryBatch.MakeDocumentId(Guid.NewGuid().ToString());

    // Build the document up front; the session is only used to store and save it.
    var batch = new RetryBatch
    {
        Id = batchDocumentId,
        Context = batchName,
        RequestId = requestId,
        RetryType = retryType,
        Originator = originator,
        Classifier = classifier,
        StartTime = startTime,
        Last = last,
        InitialBatchSize = failedMessageRetryIds.Length,
        RetrySessionId = RetrySessionId,
        FailureRetries = failedMessageRetryIds,
        Status = RetryBatchStatus.MarkingDocuments
    };

    using (var session = store.OpenAsyncSession())
    {
        await session.StoreAsync(batch).ConfigureAwait(false);
        await session.SaveChangesAsync().ConfigureAwait(false);
    }

    return batchDocumentId;
}
/// <summary>
/// Forwards a staged retry batch through the return-to-sender pipeline, notifies the retrying
/// manager, and deletes the batch document from the session.
/// </summary>
async Task Forward(RetryBatch forwardingBatch, IAsyncDocumentSession session, CancellationToken cancellationToken)
{
    var messageCount = forwardingBatch.FailureRetries.Count;
    Log.InfoFormat("Forwarding batch {0} with {1} messages", forwardingBatch.Id, messageCount);

    await retryingManager.Forwarding(forwardingBatch.RequestId, forwardingBatch.RetryType).ConfigureAwait(false);

    var stagedBatchFilter = IsPartOfStagedBatch(forwardingBatch.StagingId);

    if (isRecoveringFromPrematureShutdown)
    {
        // After a crash the exact staged count is unknown, so the forwarder runs without one.
        Log.Warn("Recovering from premature shutdown. Starting forwarder in timeout mode");
        await returnToSender.Run(stagedBatchFilter, cancellationToken).ConfigureAwait(false);
        await retryingManager.ForwardedBatch(forwardingBatch.RequestId, forwardingBatch.RetryType, forwardingBatch.InitialBatchSize).ConfigureAwait(false);
    }
    else
    {
        Log.DebugFormat("Starting forwarder in counting mode with {0} messages", messageCount);
        await returnToSender.Run(stagedBatchFilter, cancellationToken, messageCount).ConfigureAwait(false);
        await retryingManager.ForwardedBatch(forwardingBatch.RequestId, forwardingBatch.RetryType, messageCount).ConfigureAwait(false);
    }

    session.Delete(forwardingBatch);
    Log.InfoFormat("Retry batch {0} done", forwardingBatch.Id);
}
/// <summary>
/// Stages all unresolved failed messages still owned by this batch under a fresh staging
/// attempt id, raises <c>MessagesSubmittedForRetry</c> (failure groups publish on completion
/// of the entire group instead), and moves the batch to Forwarding status.
/// </summary>
/// <param name="stagingBatch">Batch being staged; mutated in place (Status, StagingId, FailureRetries).</param>
/// <param name="session">RavenDB async session used to load and evict documents.</param>
/// <returns>The number of messages staged, or 0 when the batch was cancelled.</returns>
async Task<int> Stage(RetryBatch stagingBatch, IAsyncDocumentSession session)
{
    var stagingId = Guid.NewGuid().ToString();

    var failedMessageRetryDocs = await session.LoadAsync<FailedMessageRetry>(stagingBatch.FailureRetries).ConfigureAwait(false);
    // Only retries still pointing at THIS batch are staged; others were claimed by another batch.
    var matchingFailures = failedMessageRetryDocs
        .Where(r => r != null && r.RetryBatchId == stagingBatch.Id)
        .Distinct(FailedMessageEqualityComparer.Instance)
        .ToArray();

    // Evict the loaded retry docs so the session no longer tracks them.
    foreach (var failedMessageRetry in failedMessageRetryDocs)
    {
        if (failedMessageRetry != null)
        {
            session.Advanced.Evict(failedMessageRetry);
        }
    }

    var failedMessagesById = matchingFailures.ToDictionary(x => x.FailedMessageId, x => x);
    if (!failedMessagesById.Any())
    {
        Log.Info($"Retry batch {stagingBatch.Id} cancelled as all matching unresolved messages are already marked for retry as part of another batch.");
        session.Delete(stagingBatch);
        return 0;
    }

    var failedMessagesDocs = await session.LoadAsync<FailedMessage>(failedMessagesById.Keys).ConfigureAwait(false);
    var messages = failedMessagesDocs
        .Where(m => m != null)
        .ToArray();

    Log.Info($"Staging {messages.Length} messages for retry batch {stagingBatch.Id} with staging attempt Id {stagingId}.");

    await Task.WhenAll(messages.Select(m => TryStageMessage(m, stagingId, failedMessagesById[m.Id])).ToArray()).ConfigureAwait(false);

    if (stagingBatch.RetryType != RetryType.FailureGroup) // FailureGroup published on completion of entire group
    {
        var failedIds = messages.Select(x => x.UniqueMessageId).ToArray();
        await domainEvents.Raise(new MessagesSubmittedForRetry
        {
            FailedMessageIds = failedIds,
            NumberOfFailedMessages = failedIds.Length,
            Context = stagingBatch.Context
        }).ConfigureAwait(false);
    }

    var msgLookup = messages.ToLookup(x => x.Id);
    stagingBatch.Status = RetryBatchStatus.Forwarding;
    stagingBatch.StagingId = stagingId;
    // Keep only the retries whose failed message was actually loaded and staged.
    stagingBatch.FailureRetries = matchingFailures.Where(x => msgLookup[x.FailedMessageId].Any()).Select(x => x.Id).ToArray();

    // BUGFIX: this log call's interpolated string was split across a physical newline, which is
    // illegal in a non-verbatim interpolated string literal; rejoined into a single literal.
    Log.Info($"Retry batch {stagingBatch.Id} staged with Staging Id {stagingBatch.StagingId} and {stagingBatch.FailureRetries.Count} matching failure retries");

    return messages.Length;
}
/// <summary>
/// Stages the batch's unresolved failed messages under a new staging attempt id, raises the
/// MessagesSubmittedForRetry event for non-failure-group batches, and flips the batch into
/// Forwarding status. Returns the number of messages staged, or 0 when the batch was cancelled.
/// </summary>
async Task<int> Stage(RetryBatch stagingBatch, IAsyncDocumentSession session)
{
    var stagingId = Guid.NewGuid().ToString();

    var retryDocs = await session.LoadAsync<FailedMessageRetry>(stagingBatch.FailureRetries).ConfigureAwait(false);
    // Only retries that still reference this batch are eligible; the rest belong to another batch.
    var batchOwnedRetries = retryDocs
        .Where(r => r != null && r.RetryBatchId == stagingBatch.Id)
        .ToArray();

    // Evict every loaded retry doc so the session no longer tracks them.
    foreach (var doc in retryDocs)
    {
        if (doc != null)
        {
            session.Advanced.Evict(doc);
        }
    }

    var messageIds = batchOwnedRetries.Select(x => x.FailedMessageId).ToArray();
    if (messageIds.Length == 0)
    {
        Log.Info($"Retry batch {stagingBatch.Id} cancelled as all matching unresolved messages are already marked for retry as part of another batch");
        session.Delete(stagingBatch);
        return 0;
    }

    var loadedMessages = await session.LoadAsync<FailedMessage>(messageIds).ConfigureAwait(false);
    var messages = loadedMessages
        .Where(m => m != null)
        .ToArray();

    Log.DebugFormat("Staging {0} messages for Retry Batch {1} with staging attempt Id {2}", messages.Length, stagingBatch.Id, stagingId);

    Parallel.ForEach(messages, message => StageMessage(message, stagingId));

    if (stagingBatch.RetryType != RetryType.FailureGroup) // FailureGroup published on completion of entire group
    {
        var failedIds = messages.Select(x => x.UniqueMessageId).ToArray();
        // NOTE(review): Raise is not awaited here, unlike the async sibling Stage — confirm intended.
        domainEvents.Raise(new MessagesSubmittedForRetry
        {
            FailedMessageIds = failedIds,
            NumberOfFailedMessages = failedIds.Length,
            Context = stagingBatch.Context
        });
    }

    var byMessageId = messages.ToLookup(x => x.Id);
    stagingBatch.Status = RetryBatchStatus.Forwarding;
    stagingBatch.StagingId = stagingId;
    stagingBatch.FailureRetries = batchOwnedRetries
        .Where(x => byMessageId[x.FailedMessageId].Any())
        .Select(x => x.Id)
        .ToArray();

    Log.DebugFormat("Retry batch {0} staged with Staging Id {1} and {2} matching failure retries", stagingBatch.Id, stagingBatch.StagingId, stagingBatch.FailureRetries.Count);
    Log.InfoFormat("Retry batch {0} staged {1} messages", stagingBatch.Id, messages.Length);

    return messages.Length;
}
/// <summary>
/// Synchronous staging of a retry batch: marks matching unresolved messages under a new staging
/// id, publishes MessagesSubmittedForRetry for non-failure-group batches, and moves the batch
/// into Forwarding status. Returns the number of messages staged, or 0 when cancelled.
/// </summary>
int Stage(RetryBatch stagingBatch, IDocumentSession session)
{
    var stagingId = Guid.NewGuid().ToString();

    var retryDocs = session.Load<FailedMessageRetry>(stagingBatch.FailureRetries);
    // Only retries that still reference this batch are eligible for staging.
    var batchOwnedRetries = retryDocs
        .Where(r => r != null && r.RetryBatchId == stagingBatch.Id)
        .ToArray();

    // Evict loaded retry docs so the session no longer tracks them.
    foreach (var doc in retryDocs)
    {
        if (doc != null)
        {
            session.Advanced.Evict(doc);
        }
    }

    var messageIds = batchOwnedRetries.Select(x => x.FailedMessageId).ToArray();
    if (messageIds.Length == 0)
    {
        Log.Info($"Retry batch {stagingBatch.Id} cancelled as all matching unresolved messages are already marked for retry as part of another batch");
        session.Delete(stagingBatch);
        return 0;
    }

    var messages = session.Load<FailedMessage>(messageIds)
        .Where(m => m != null)
        .ToArray();

    Parallel.ForEach(messages, message => StageMessage(message, stagingId));

    if (stagingBatch.RetryType != RetryType.FailureGroup) // FailureGroup published on completion of entire group
    {
        bus.Publish<MessagesSubmittedForRetry>(m =>
        {
            var failedIds = messages.Select(x => x.UniqueMessageId).ToArray();
            m.FailedMessageIds = failedIds;
            m.NumberOfFailedMessages = failedIds.Length;
            m.Context = stagingBatch.Context;
        });
    }

    var byMessageId = messages.ToLookup(x => x.Id);
    stagingBatch.Status = RetryBatchStatus.Forwarding;
    stagingBatch.StagingId = stagingId;
    stagingBatch.FailureRetries = batchOwnedRetries
        .Where(x => byMessageId[x.FailedMessageId].Any())
        .Select(x => x.Id)
        .ToArray();

    Log.InfoFormat("Retry batch {0} staged {1} messages", stagingBatch.Id, messages.Length);
    return messages.Length;
}
/// <summary>
/// Ensures the currently-forwarding batch (if any, and if it matches the classifier) is
/// represented in the list of open group operations, adding a mapped entry when missing.
/// </summary>
void MakeSureForwardingBatchIsIncludedAsOpen(string classifier, RetryBatch forwardingBatch, List<GroupOperation> open)
{
    // Nothing to do when there is no forwarding batch for this classifier,
    // or when the operation is already represented in the open list.
    if (forwardingBatch == null || forwardingBatch.Classifier != classifier || IsCurrentForwardingOperationIncluded(open, forwardingBatch))
    {
        return;
    }

    var status = retryingManager.GetStatusForRetryOperation(forwardingBatch.RequestId, RetryType.FailureGroup);
    open.Add(MapOpenForForwardingOperation(classifier, forwardingBatch, status));
}
/// <summary>
/// Maps a currently-forwarding retry batch plus its in-memory retry summary to an open
/// <see cref="GroupOperation"/> view model.
/// </summary>
static GroupOperation MapOpenForForwardingOperation(string classifier, RetryBatch forwardingBatch, InMemoryRetry summary)
{
    var progress = summary.GetProgress();

    var operation = new GroupOperation
    {
        Id = forwardingBatch.RequestId,
        Title = forwardingBatch.Originator,
        Type = classifier,
        Count = 0,
        Last = summary.Last,
        OperationStatus = summary.RetryState.ToString(),
        OperationFailed = summary.Failed,
        OperationProgress = progress.Percentage,
        OperationRemainingCount = progress.MessagesRemaining,
        OperationCompletionTime = summary.CompletionTime,
        OperationStartTime = summary.Started,
        NeedUserAcknowledgement = false
    };

    return operation;
}
/// <summary>
/// Synchronously forwards a staged retry batch via the return-to-sender runner, reports the
/// forwarded count to the retry operation manager, and deletes the batch document.
/// </summary>
void Forward(RetryBatch forwardingBatch, IDocumentSession session, CancellationToken cancellationToken)
{
    var count = forwardingBatch.FailureRetries.Count;

    retryOperationManager.Forwarding(forwardingBatch.RequestId, forwardingBatch.RetryType);

    if (isRecoveringFromPrematureShutdown)
    {
        // After a crash the staged count is unknown: run without a count and
        // report the initial batch size instead.
        returnToSender.Run(IsPartOfStagedBatch(forwardingBatch.StagingId), cancellationToken);
        retryOperationManager.ForwardedBatch(forwardingBatch.RequestId, forwardingBatch.RetryType, forwardingBatch.InitialBatchSize);
    }
    else
    {
        returnToSender.Run(IsPartOfStagedBatch(forwardingBatch.StagingId), cancellationToken, count);
        retryOperationManager.ForwardedBatch(forwardingBatch.RequestId, forwardingBatch.RetryType, count);
    }

    session.Delete(forwardingBatch);
    Log.InfoFormat("Retry batch {0} done", forwardingBatch.Id);
}
/// <summary>
/// Forwards a staged retry batch, skipping the forwarder run entirely when the batch has no
/// messages, then reports to the retrying manager and deletes the batch document.
/// </summary>
async Task Forward(RetryBatch forwardingBatch, IAsyncDocumentSession session, CancellationToken cancellationToken)
{
    var messageCount = forwardingBatch.FailureRetries.Count;

    await retryingManager.Forwarding(forwardingBatch.RequestId, forwardingBatch.RetryType).ConfigureAwait(false);

    if (isRecoveringFromPrematureShutdown)
    {
        // Staged count is unknown after a crash; run in timeout mode (no count) and
        // report the initial batch size.
        Log.Warn($"Recovering from premature shutdown. Starting forwarder for batch {forwardingBatch.Id} in timeout mode.");
        await returnToSender.Run(forwardingBatch.Id, IsPartOfStagedBatch(forwardingBatch.StagingId), cancellationToken, null).ConfigureAwait(false);
        await retryingManager.ForwardedBatch(forwardingBatch.RequestId, forwardingBatch.RetryType, forwardingBatch.InitialBatchSize).ConfigureAwait(false);
    }
    else
    {
        if (messageCount > 0)
        {
            Log.Info($"Starting forwarder for batch {forwardingBatch.Id} with {messageCount} messages in counting mode.");
            await returnToSender.Run(forwardingBatch.Id, IsPartOfStagedBatch(forwardingBatch.StagingId), cancellationToken, messageCount).ConfigureAwait(false);
        }
        else
        {
            Log.Info($"Skipping forwarding of batch {forwardingBatch.Id}: no messages to forward.");
        }

        await retryingManager.ForwardedBatch(forwardingBatch.RequestId, forwardingBatch.RetryType, messageCount).ConfigureAwait(false);
    }

    session.Delete(forwardingBatch);
    Log.Info($"Done forwarding batch {forwardingBatch.Id}.");
}
/// <summary>
/// Synchronously creates and persists a new <see cref="RetryBatch"/> document in
/// MarkingDocuments status and returns the new document id.
/// </summary>
public string CreateBatchDocument(string requestId, RetryType retryType, int initialBatchSize, string originator, DateTime startTime, DateTime? last = null, string batchName = null, string classifier = null)
{
    var batchDocumentId = RetryBatch.MakeDocumentId(Guid.NewGuid().ToString());

    // Build the document up front; the session exists only to store and save it.
    var batch = new RetryBatch
    {
        Id = batchDocumentId,
        Context = batchName,
        RequestId = requestId,
        RetryType = retryType,
        Originator = originator,
        Classifier = classifier,
        StartTime = startTime,
        Last = last,
        InitialBatchSize = initialBatchSize,
        RetrySessionId = RetrySessionId,
        Status = RetryBatchStatus.MarkingDocuments
    };

    using (var session = store.OpenSession())
    {
        session.Store(batch);
        session.SaveChanges();
    }

    return batchDocumentId;
}
/// <summary>
/// Stages all unresolved failed messages still owned by this batch under a fresh staging
/// attempt id, dispatches the corresponding transport operations via TryDispatch, raises
/// MessagesSubmittedForRetry for non-failure-group batches, and moves the batch to Forwarding.
/// </summary>
/// <param name="stagingBatch">Batch being staged; mutated in place (Status, StagingId, FailureRetries).</param>
/// <param name="session">RavenDB async session used to load and evict documents.</param>
/// <returns>The number of messages staged, or 0 when the batch was cancelled.</returns>
async Task<int> Stage(RetryBatch stagingBatch, IAsyncDocumentSession session)
{
    var stagingId = Guid.NewGuid().ToString();
    var failedMessageRetryDocs = await session.LoadAsync<FailedMessageRetry>(stagingBatch.FailureRetries).ConfigureAwait(false);
    // Only retries still pointing at THIS batch are staged; others were claimed by another batch.
    var failedMessageRetriesById = failedMessageRetryDocs
        .Where(r => r != null && r.RetryBatchId == stagingBatch.Id)
        .Distinct(FailedMessageEqualityComparer.Instance)
        .ToDictionary(x => x.FailedMessageId, x => x);
    // Evict every loaded retry doc so the session no longer tracks them.
    foreach (var failedMessageRetry in failedMessageRetryDocs)
    {
        if (failedMessageRetry != null)
        {
            session.Advanced.Evict(failedMessageRetry);
        }
    }
    if (failedMessageRetriesById.Count == 0)
    {
        // All messages were already claimed elsewhere — cancel this batch.
        Log.Info($"Retry batch {stagingBatch.Id} cancelled as all matching unresolved messages are already marked for retry as part of another batch.");
        session.Delete(stagingBatch);
        return(0);
    }
    var failedMessagesDocs = await session.LoadAsync<FailedMessage>(failedMessageRetriesById.Keys).ConfigureAwait(false);
    // Load can return null entries for missing documents; drop them.
    var messages = failedMessagesDocs.Where(m => m != null).ToArray();
    Log.Info($"Staging {messages.Length} messages for retry batch {stagingBatch.Id} with staging attempt Id {stagingId}.");
    var previousAttemptFailed = false;
    var transportOperations = new TransportOperation[messages.Length];
    var current = 0;
    foreach (var failedMessage in messages)
    {
        // NOTE(review): ToTransportOperation presumably builds the outgoing transport
        // message for this staging attempt — confirm against its definition.
        transportOperations[current++] = ToTransportOperation(failedMessage, stagingId);
        if (!previousAttemptFailed)
        {
            // Any retry doc with StageAttempts > 0 signals an earlier staging attempt failed.
            previousAttemptFailed = failedMessageRetriesById[failedMessage.Id].StageAttempts > 0;
        }

        // should not be done concurrently due to sessions not being thread safe
        failedMessage.Status = FailedMessageStatus.RetryIssued;
    }
    await TryDispatch(transportOperations, messages, failedMessageRetriesById, stagingId, previousAttemptFailed).ConfigureAwait(false);
    if (stagingBatch.RetryType != RetryType.FailureGroup) //FailureGroup published on completion of entire group
    {
        var failedIds = messages.Select(x => x.UniqueMessageId).ToArray();
        await domainEvents.Raise(new MessagesSubmittedForRetry
        {
            FailedMessageIds = failedIds,
            NumberOfFailedMessages = failedIds.Length,
            Context = stagingBatch.Context
        }).ConfigureAwait(false);
    }
    var msgLookup = messages.ToLookup(x => x.Id);
    stagingBatch.Status = RetryBatchStatus.Forwarding;
    stagingBatch.StagingId = stagingId;
    // Keep only retries whose failed message was actually loaded and staged.
    stagingBatch.FailureRetries = failedMessageRetriesById.Values.Where(x => msgLookup[x.FailedMessageId].Any()).Select(x => x.Id).ToArray();
    Log.Info($"Retry batch {stagingBatch.Id} staged with Staging Id {stagingBatch.StagingId} and {stagingBatch.FailureRetries.Count} matching failure retries");
    return(messages.Length);
}
/// <summary>
/// True when the forwarding batch is a failure-group retry that is already represented in the
/// list of open group operations (matched by request id and classifier).
/// </summary>
static bool IsCurrentForwardingOperationIncluded(List<GroupOperation> open, RetryBatch forwardingBatch) =>
    open.Any(op => forwardingBatch.RetryType == RetryType.FailureGroup
                   && op.Id == forwardingBatch.RequestId
                   && op.Type == forwardingBatch.Classifier);
/// <summary>
/// Forwards a staged retry batch via the return-to-sender runner (skipped entirely when the
/// batch is empty and not recovering from a crash) and deletes the batch document.
/// </summary>
void Forward(RetryBatch forwardingBatch, IDocumentSession session)
{
    var count = forwardingBatch.FailureRetries.Count;

    if (isRecoveringFromPrematureShutdown)
    {
        // Staged count is unknown after a crash; run without one.
        returnToSender.Run(IsPartOfStagedBatch(forwardingBatch.StagingId));
    }
    else if (count > 0)
    {
        returnToSender.Run(IsPartOfStagedBatch(forwardingBatch.StagingId), count);
    }

    session.Delete(forwardingBatch);
    Log.InfoFormat("Retry batch {0} done", forwardingBatch.Id);
}
/// <summary>
/// Stages the batch's unresolved messages for retry under a new staging id, publishes
/// MessagesSubmittedForRetry, and moves the batch into Forwarding status.
/// </summary>
/// <returns>False when the batch was cancelled because no matching unresolved messages remain; true otherwise.</returns>
bool Stage(RetryBatch stagingBatch, IDocumentSession session)
{
    var stagingId = Guid.NewGuid().ToString();

    // Only retries still pointing at THIS batch are staged; others were claimed elsewhere.
    var matchingFailures = session.Load<FailedMessageRetry>(stagingBatch.FailureRetries)
        .Where(r => r != null && r.RetryBatchId == stagingBatch.Id)
        .ToArray();

    var messageIds = matchingFailures.Select(x => x.FailedMessageId).ToArray();

    if (!messageIds.Any())
    {
        Log.InfoFormat("Retry batch {0} cancelled as all matching unresolved messages are already marked for retry as part of another batch", stagingBatch.Id);
        session.Delete(stagingBatch);
        return false;
    }

    // BUGFIX: Load can return null entries for documents that no longer exist. The sibling
    // Stage implementations filter those out, but this one passed them straight into
    // StageMessage and the UniqueMessageId selector, risking a NullReferenceException.
    var messages = session.Load<FailedMessage>(messageIds)
        .Where(m => m != null)
        .ToArray();

    foreach (var message in messages)
    {
        StageMessage(message, stagingId);
    }

    bus.Publish<MessagesSubmittedForRetry>(m =>
    {
        m.FailedMessageIds = messages.Select(x => x.UniqueMessageId).ToArray();
        m.Context = stagingBatch.Context;
    });

    stagingBatch.Status = RetryBatchStatus.Forwarding;
    stagingBatch.StagingId = stagingId;
    stagingBatch.FailureRetries = matchingFailures.Select(x => x.Id).ToArray();

    Log.InfoFormat("Retry batch {0} staged {1} messages", stagingBatch.Id, messages.Length);
    return true;
}