///<summary>Gets the next message in the queue.</summary>
///<returns>A non-null queue CloudQueueMessage.</returns>
///<remarks>This is a blocking method. If the queue is empty, the call will never return.</remarks>
public CloudQueueMessage GetMessage()
{
    while (true)
    {
        // Poll the queue every 5 seconds until a message arrives
        // (blocks forever if the queue stays empty, per the remarks above).
        var message = queue.GetMessage();
        while (message == null)
        {
            Thread.Sleep(TimeSpan.FromSeconds(5));
            message = queue.GetMessage();
        }

        // Record the message in table storage so that only one machine
        // ends up handling it: the insert acts as a distributed claim.
        var dataContext = tableClient.GetDataServiceContext();
        var pm = new ProcessedMessage(queue, message);
        dataContext.AddObject(TableName, pm);
        try
        {
            dataContext.SaveChanges();
        }
        catch (DataServiceRequestException ex)
        {
            Debug.Assert(ex.Response.Count() == 1);
            //If we failed to save, assume that a different machine already received the message
            //and go back to polling for the next one.
            if (ex.Response.All(o => o.StatusCode == 409)) //HTTP 409 Conflict
                continue;
            else
                throw;
        }
        // The claim succeeded, so this machine owns the message.
        return message;
    }
}
public void Processed_messages_are_being_expired()
{
    // Arrange: one ordinary and one system message, both older than the retention window.
    var expiredMessage = new ProcessedMessage
    {
        Id = "1",
        ProcessedAt = DateTime.UtcNow.AddHours(-(Settings.HoursToKeepMessagesBeforeExpiring * 3)),
    };
    var expiredSystemMessage = new ProcessedMessage
    {
        Id = "2",
        ProcessedAt = DateTime.UtcNow.AddHours(-(Settings.HoursToKeepMessagesBeforeExpiring * 2)),
    };
    expiredSystemMessage.MessageMetadata["IsSystemMessage"] = true;

    using (var session = documentStore.OpenSession())
    {
        session.Store(expiredMessage);
        session.Store(expiredSystemMessage);
        session.SaveChanges();
    }

    // Act: wait for indexing, then give the expiry timer two full cycles.
    documentStore.WaitForIndexing();
    Thread.Sleep(Settings.ExpirationProcessTimerInSeconds * 1000 * 2);

    // Assert: both documents were deleted.
    using (var session = documentStore.OpenSession())
    {
        Assert.Null(session.Load<ProcessedMessage>(expiredMessage.Id));
        Assert.Null(session.Load<ProcessedMessage>(expiredSystemMessage.Id));
    }
}
// Tweet Controller
private static bool NewTweet(Tweet tweet)
{
    // Reject invalid tweets and surface the validator's error message.
    if (!Validator.IsTweetValid(tweet))
    {
        error = Validator.Error;
        return false;
    }

    tweet.Id = IdGeneratorSingleton.Instance.NewTweetId(); // assign a fresh ID
    Sanitizer.SanitizeTweet(tweet);                        // scrub content before storing
    DataBaseSingleton.Instance.TweetList.Add(tweet);       // keep in the in-memory list
    Persistence.Serialize();                               // persist the updated list

    // Show the processed tweet in its own window.
    var window = new ProcessedMessage(tweet);
    window.Show();
    return true;
}
// Converts a transport message into the audit document that gets persisted.
public ProcessedMessage ConvertToSaveMessage(TransportMessage message)
{
    // Base metadata taken directly from the transport message.
    var metadata = new Dictionary<string, object>
    {
        ["MessageId"] = message.Id,
        ["MessageIntent"] = message.MessageIntent,
        ["HeadersForSearching"] = string.Join(" ", message.Headers.Values)
    };

    // Let every registered enricher contribute additional metadata.
    foreach (var enricher in enrichers)
    {
        enricher.Enrich(message.Headers, metadata);
    }

    bodyStorageEnricher.StoreAuditMessageBody(message.Body, message.Headers, metadata);

    // We do this so Raven does not spend time assigning a hilo key
    return new ProcessedMessage(message.Headers, metadata)
    {
        Id = $"ProcessedMessages/{Guid.NewGuid()}"
    };
}
// Controller for message of type SMS
private static bool NewSms(Sms sms)
{
    // Reject invalid SMS messages and surface the validator's error message.
    if (!Validator.IsSmsValid(sms))
    {
        error = Validator.Error;
        return false;
    }

    sms.Id = IdGeneratorSingleton.Instance.NewSmsId(); // assign a fresh ID
    Sanitizer.SanitizeSms(sms);                        // scrub content before storing
    DataBaseSingleton.Instance.SmsList.Add(sms);       // keep in the in-memory list
    Persistence.Serialize();                           // persist the updated list

    // Show the processed SMS in its own window.
    var window = new ProcessedMessage(sms);
    window.Show();
    return true;
}
public void Old_documents_are_being_expired()
{
    using (var documentStore = InMemoryStoreBuilder.GetInMemoryStore())
    {
        var expiredDate = DateTime.UtcNow.AddDays(-3);
        var thresholdDate = DateTime.UtcNow.AddDays(-2);

        // Arrange: a message processed before the expiry threshold.
        var expiredMessage = new ProcessedMessage
        {
            Id = "1",
            ProcessedAt = expiredDate
        };
        using (var session = documentStore.OpenSession())
        {
            session.Store(expiredMessage);
            session.SaveChanges();
        }

        RunExpiry(documentStore, thresholdDate);

        // Assert: nothing survived expiry.
        using (var session = documentStore.OpenSession())
        {
            Assert.IsEmpty(session.Query<ProcessedMessage>());
        }
    }
}
// Email Controller
private static bool NewEmail(Email email)
{
    // Reject invalid emails and surface the validator's error message.
    if (!Validator.IsEmailValid(email))
    {
        error = Validator.Error;
        return false;
    }

    email.Id = IdGeneratorSingleton.Instance.NewEmailId(); // assign a fresh ID
    Sanitizer.SanitizeEmail(email);                        // scrub content before storing
    DataBaseSingleton.Instance.EmailList.Add(email);       // keep in the in-memory list
    Persistence.Serialize();                               // persist the updated list

    // Show the processed email in its own window.
    var window = new ProcessedMessage(email);
    window.Show();
    return true;
}
public void Filter_out_system_messages()
{
    // Arrange: a system message ("1") and a regular message ("2").
    using (var session = documentStore.OpenSession())
    {
        var systemMessage = new ProcessedMessage { Id = "1" };
        systemMessage.MakeSystemMessage();
        session.Store(systemMessage);

        var regularMessage = new ProcessedMessage { Id = "2" };
        regularMessage.MakeSystemMessage(false);
        session.Store(regularMessage);

        session.SaveChanges();
    }

    // Act + Assert: filtering out system messages must return only "2".
    using (var session = documentStore.OpenSession())
    {
        var results = session
            .Query<MessagesViewIndex.SortAndFilterOptions, MessagesViewIndex>()
            .Customize(x => x.WaitForNonStaleResults())
            .Where(x => !x.IsSystemMessage)
            .OfType<ProcessedMessage>()
            .ToList();

        Assert.AreEqual(1, results.Count);
        Assert.AreNotEqual("1", results.Single().Id);
    }
}
public void Recent_processed_messages_are_not_being_expired()
{
    new ExpiryProcessedMessageIndex().Execute(documentStore);

    // Arrange: a message processed just now — well inside the retention window.
    var freshMessage = new ProcessedMessage
    {
        Id = "1",
        ProcessedAt = DateTime.UtcNow,
    };
    using (var session = documentStore.OpenSession())
    {
        session.Store(freshMessage);
        session.SaveChanges();
    }

    // Act: give the expiry timer two full cycles to (not) act.
    documentStore.WaitForIndexing();
    Thread.Sleep(Settings.ExpirationProcessTimerInSeconds * 1000 * 2);

    // Assert: the recent message survived.
    using (var session = documentStore.OpenSession())
    {
        Assert.NotNull(session.Load<ProcessedMessage>(freshMessage.Id));
    }
}
public void Filter_out_system_messages()
{
    // Arrange: message "1" is flagged as a system message, "2" explicitly is not.
    using (var session = documentStore.OpenSession())
    {
        var msg1 = new ProcessedMessage { Id = "1" };
        msg1.MakeSystemMessage();
        session.Store(msg1);

        var msg2 = new ProcessedMessage { Id = "2" };
        msg2.MakeSystemMessage(false);
        session.Store(msg2);

        session.SaveChanges();
    }

    // Act + Assert: the non-system filter must yield exactly the non-system message.
    using (var session = documentStore.OpenSession())
    {
        var nonSystemMessages = session
            .Query<MessagesViewIndex.SortAndFilterOptions, MessagesViewIndex>()
            .Customize(x => x.WaitForNonStaleResults())
            .Where(x => !x.IsSystemMessage)
            .OfType<ProcessedMessage>()
            .ToList();

        Assert.AreEqual(1, nonSystemMessages.Count);
        Assert.AreNotEqual("1", nonSystemMessages.Single().Id);
    }
}
public void Processed_messages_are_being_expired()
{
    // Arrange: two messages older than the retention window, one flagged as a system message.
    var oldMessage = new ProcessedMessage
    {
        Id = "1",
        ProcessedAt = DateTime.UtcNow.AddHours(-(Settings.HoursToKeepMessagesBeforeExpiring * 3)),
    };
    var oldSystemMessage = new ProcessedMessage
    {
        Id = "2",
        ProcessedAt = DateTime.UtcNow.AddHours(-(Settings.HoursToKeepMessagesBeforeExpiring * 2)),
    };
    oldSystemMessage.MessageMetadata["IsSystemMessage"] = true;

    using (var session = documentStore.OpenSession())
    {
        session.Store(oldMessage);
        session.Store(oldSystemMessage);
        session.SaveChanges();
    }

    // Act: wait for indexing, then let the expiry process run twice.
    WaitForIndexing(documentStore);
    Thread.Sleep(Settings.ExpirationProcessTimerInSeconds * 1000 * 2);

    // Assert: both messages were expired regardless of the system flag.
    using (var session = documentStore.OpenSession())
    {
        Assert.Null(session.Load<ProcessedMessage>(oldMessage.Id));
        Assert.Null(session.Load<ProcessedMessage>(oldSystemMessage.Id));
    }
}
// Converts an incoming message context into the audit document that gets persisted.
public async Task<ProcessedMessage> ConvertToSaveMessage(MessageContext message)
{
    // Fall back to a deterministic ID when the header is absent.
    if (!message.Headers.TryGetValue(Headers.MessageId, out var messageId))
    {
        messageId = DeterministicGuid.MakeId(message.MessageId).ToString();
    }

    // Concurrent dictionary: enrichers run in parallel and may all write metadata.
    var metadata = new ConcurrentDictionary<string, object>
    {
        ["MessageId"] = messageId,
        ["MessageIntent"] = message.Headers.MessageIntent(),
    };

    var enricherTasks = new List<Task>(enrichers.Length);
    foreach (var enricher in enrichers)
    {
        enricherTasks.Add(enricher.Enrich(message.Headers, metadata));
    }
    await Task.WhenAll(enricherTasks)
        .ConfigureAwait(false);

    await bodyStorageEnricher.StoreAuditMessageBody(message.Body, message.Headers, metadata)
        .ConfigureAwait(false);

    // We do this so Raven does not spend time assigning a hilo key
    return new ProcessedMessage(message.Headers, new Dictionary<string, object>(metadata))
    {
        Id = $"ProcessedMessages/{Guid.NewGuid()}"
    };
}
public async Task Stored_bodies_are_being_removed_when_message_expires()
{
    using (var documentStore = InMemoryStoreBuilder.GetInMemoryStore())
    {
        var expiredDate = DateTime.UtcNow.AddDays(-3);
        var thresholdDate = DateTime.UtcNow.AddDays(-2);

        // Arrange: an expired audit message whose metadata points at a stored body.
        var messageId = "21";
        var expiredMessage = new ProcessedMessage
        {
            Id = "1",
            ProcessedAt = expiredDate,
            MessageMetadata = new Dictionary<string, object>
            {
                { "MessageId", messageId }
            }
        };
        using (var session = documentStore.OpenSession())
        {
            session.Store(expiredMessage);
            session.SaveChanges();
        }

        var bodyStorage = new RavenAttachmentsBodyStorage
        {
            DocumentStore = documentStore
        };
        var body = new byte[] { 1, 2, 3, 4, 5 };
        using (var stream = new MemoryStream(body))
        {
            await bodyStorage.Store(messageId, "binary", 5, stream);
        }

        RunExpiry(documentStore, thresholdDate);

        // Assert: the audit document is gone...
        using (var session = documentStore.OpenSession())
        {
            Assert.Null(session.Load<ProcessedMessage>(expiredMessage.Id));
        }

        // ...and so is the stored body.
        var fetchResult = await bodyStorage.TryFetch(messageId);
        Assert.False(fetchResult.HasResult, "Audit document body should be deleted");
    }
}
// Looks up the interim message for a processed message and copies the audit fields over.
private InterimMessage GetInterimMessage(ProcessedMessage processedMessage)
{
    var interim = model.GetInterimMessage(processedMessage.MessageId);
    interim.Type = processedMessage.MessageType;
    interim.Intent = processedMessage.MessageIntent;
    interim.RelatedTo = processedMessage.RelatedTo;
    return interim;
}
// Records the (message, consumer) pair for dedup; returns true if it was already processed.
private async Task<bool> TrackMessageAsync(BookmarkInserted message)
{
    var record = new ProcessedMessage(message.Id, typeof(BookmarkInsertedConsumer));

    var alreadyProcessed = await _context.ProcessedMessages.AnyAsync(record.IsEqual());
    if (!alreadyProcessed)
    {
        // First sighting: queue the dedup record for insertion.
        _context.ProcessedMessages.Add(record);
    }
    return alreadyProcessed;
}
public void CanAcceptMessageWithoutHeaders()
{
    // Arrange: a model builder wired with the default strategies.
    var endpointStrategy = new LogicalRoutingNodeStrategy();
    var messageStrategy = new CollapseMessagesToSameReceiverMessageNodeStrategy(endpointStrategy);
    var builder = new ModelBuilder(new NodeStrategy(endpointStrategy, messageStrategy));

    // A message with an empty Headers dictionary must not crash the builder.
    var message = new ProcessedMessage
    {
        Headers = new Dictionary<string, string>()
    };

    Assert.DoesNotThrow(() => builder.Accept(message));
}
public void CanAcceptMessageWithoutHeaders()
{
    // Arrange the strategy chain and the builder under test.
    var routingStrategy = new LogicalRoutingNodeStrategy();
    var collapseStrategy = new CollapseMessagesToSameReceiverMessageNodeStrategy(routingStrategy);
    var nodeStrategy = new NodeStrategy(routingStrategy, collapseStrategy);
    var modelBuilder = new ModelBuilder(nodeStrategy);

    // Accepting a message whose headers dictionary is empty should not throw.
    var headerlessMessage = new ProcessedMessage
    {
        Headers = new Dictionary<string, string>()
    };

    Assert.DoesNotThrow(() => modelBuilder.Accept(headerlessMessage));
}
public void Many_processed_messages_are_being_expired()
{
    new ExpiryProcessedMessageIndex().Execute(documentStore);

    // One recent message that must survive expiry.
    var recentMessage = new ProcessedMessage
    {
        Id = "2",
        ProcessedAt = DateTime.UtcNow,
    };

    // 100 messages processed well before the retention window.
    // FIX: the original passed an *hours* setting to AddDays; AddHours matches the
    // setting's unit (and the sibling expiry tests) and the timestamps stay expired.
    // FIX: the original also created a dead ProcessedMessage before the loop that
    // was immediately overwritten; only the loop's last message is asserted on.
    ProcessedMessage lastExpiredMessage = null;
    using (var session = documentStore.OpenSession())
    {
        for (var i = 0; i < 100; i++)
        {
            lastExpiredMessage = new ProcessedMessage
            {
                Id = Guid.NewGuid().ToString(),
                ProcessedAt = DateTime.UtcNow
                    .AddMinutes(-DateTime.UtcNow.Millisecond % 30)
                    .AddHours(-(Settings.HoursToKeepMessagesBeforeExpiring * 3)),
            };
            session.Store(lastExpiredMessage);
        }
        session.Store(recentMessage);
        session.SaveChanges();
    }

    documentStore.WaitForIndexing();
    Thread.Sleep(Settings.ExpirationProcessTimerInSeconds * 1000 * 10);

    using (var session = documentStore.OpenSession())
    {
        // Only the recent message should remain in the index.
        var results = session.Query<ProcessedMessage, ExpiryProcessedMessageIndex>()
            .Customize(x => x.WaitForNonStaleResults())
            .ToArray();
        Assert.AreEqual(1, results.Length);

        var msg = session.Load<ProcessedMessage>(lastExpiredMessage.Id);
        Assert.Null(msg, "Message with datestamp {0} and ID {1} was found", lastExpiredMessage.ProcessedAt, lastExpiredMessage.Id);

        Assert.NotNull(session.Load<ProcessedMessage>(recentMessage.Id));
    }
}
public void Many_processed_messages_are_being_expired()
{
    new ExpiryProcessedMessageIndex().Execute(documentStore);

    // A single fresh message that expiry must leave alone.
    var survivingMessage = new ProcessedMessage
    {
        Id = "2",
        ProcessedAt = DateTime.UtcNow,
    };

    // FIX: AddDays was being fed an *hours* setting; AddHours matches the setting's
    // unit (as the other expiry tests do) and the timestamps are still expired.
    // FIX: dropped the dead pre-loop ProcessedMessage that the loop overwrote.
    ProcessedMessage lastStoredExpired = null;
    using (var session = documentStore.OpenSession())
    {
        for (var i = 0; i < 100; i++)
        {
            lastStoredExpired = new ProcessedMessage
            {
                Id = Guid.NewGuid().ToString(),
                ProcessedAt = DateTime.UtcNow
                    .AddMinutes(-DateTime.UtcNow.Millisecond % 30)
                    .AddHours(-(Settings.HoursToKeepMessagesBeforeExpiring * 3)),
            };
            session.Store(lastStoredExpired);
        }
        session.Store(survivingMessage);
        session.SaveChanges();
    }

    documentStore.WaitForIndexing();
    Thread.Sleep(Settings.ExpirationProcessTimerInSeconds * 1000 * 10);

    using (var session = documentStore.OpenSession())
    {
        // After expiry only the fresh message should be left in the index.
        var remaining = session.Query<ProcessedMessage, ExpiryProcessedMessageIndex>()
            .Customize(x => x.WaitForNonStaleResults())
            .ToArray();
        Assert.AreEqual(1, remaining.Length);

        var expired = session.Load<ProcessedMessage>(lastStoredExpired.Id);
        Assert.Null(expired, "Message with datestamp {0} and ID {1} was found", lastStoredExpired.ProcessedAt, lastStoredExpired.Id);

        Assert.NotNull(session.Load<ProcessedMessage>(survivingMessage.Id));
    }
}
// Outbox behavior: process the incoming message and record its outgoing messages
// in one transaction, then (re)publish them outside of it. A replayed message finds
// the stored record and skips straight to re-publishing with the same message IDs,
// so downstream deduplication works.
public override async Task Invoke(IIncomingLogicalMessageContext context, Func<Task> next)
{
    ProcessedMessage processedMessage;

    // FIX: the DbContext was never disposed; wrap it in a using block.
    using (var dbContext = new OrdersDataContext())
    {
        using (var dbContextTransaction = dbContext
                   .Database.BeginTransaction())
        {
            processedMessage = await dbContext
                .ProcessedMessages
                .FirstOrDefaultAsync(m => m.MessageId == context.MessageId);

            if (processedMessage == null)
            {
                processedMessage = new ProcessedMessage
                {
                    MessageId = context.MessageId
                };
                dbContext.ProcessedMessages.Add(processedMessage);
                await dbContext.SaveChangesAsync()
                    .ConfigureAwait(false);

                // Make the context available to the handler, then run it.
                context.Extensions.Set(dbContext);
                await next().ConfigureAwait(false); //Process

                // Capture the messages the handler wants published.
                var serializedMessages = JsonConvert.SerializeObject(dbContext.OutgoingMessages, serializerSettings);
                processedMessage.OutgoingMessages = serializedMessages;
                await dbContext.SaveChangesAsync()
                    .ConfigureAwait(false);
            }
            dbContextTransaction.Commit();
        }
    }

    // Publish (or re-publish on retry) with stable message IDs.
    var outgoingMessages = JsonConvert.DeserializeObject<List<OutgoingMessage>>(processedMessage.OutgoingMessages, serializerSettings);
    foreach (var outgoingMessage in outgoingMessages)
    {
        var publishOptions = new PublishOptions();
        publishOptions.SetMessageId(outgoingMessage.MessageId);
        await context.Publish(outgoingMessage.Payload, publishOptions)
            .ConfigureAwait(false);
    }
}
public void Many_documents_are_being_expired()
{
    using (var documentStore = InMemoryStoreBuilder.GetInMemoryStore())
    {
        var expiredDate = DateTime.UtcNow.AddDays(-3);
        var thresholdDate = DateTime.UtcNow.AddDays(-2);
        var recentDate = DateTime.UtcNow.AddDays(-1);

        // Arrange: a batch of expired messages plus one recent message.
        var expiredMessages = BuildExpiredMessaged(expiredDate).ToList();
        using (var session = documentStore.OpenSession())
        {
            foreach (var message in expiredMessages)
            {
                session.Store(message);
            }
            session.SaveChanges();
        }
        using (var session = documentStore.OpenSession())
        {
            var recentMessage = new ProcessedMessage
            {
                Id = "recentMessageId",
                ProcessedAt = recentDate
            };
            session.Store(recentMessage);
            session.SaveChanges();
        }

        RunExpiry(documentStore, thresholdDate);

        // Assert: every expired message is gone...
        foreach (dynamic message in expiredMessages)
        {
            using (var session = documentStore.OpenSession())
            {
                Assert.Null(session.Load<ProcessedMessage>(message.Id));
            }
        }

        // ...and only the recent one remains.
        using (var session = documentStore.OpenSession())
        {
            Assert.AreEqual(1, session.Query<ProcessedMessage>().Count());
        }
    }
}
// Polls the chat service forever and prints each message exactly once.
public static void Worker()
{
    while (true)
    {
        List<MessageDto> messages = ChatService.GetAllMessages();
        foreach (MessageDto message in messages)
        {
            if (!ProcessedMessage.Any(a => a.Id == message.Id))
            {
                Console.WriteLine(message.SendDate.ToString("mm:ss") + " " + message.Nickname + ":" + message.Message);
                // FIX: only remember messages we have not seen before. The original
                // AddRange(messages) re-added every message on every poll cycle,
                // growing the processed list without bound and making the Any()
                // scan progressively slower.
                ProcessedMessage.Add(message);
            }
        }
        Thread.Sleep(100);
    }
}
public override void Process()
{
    // Runs only on non-server instances: resolve the two networked objects
    // referenced by id and wire the logic reader to its fabricator locally.
    if (!GameManager.IsServer)
    {
        patch_LogicHashGen logicReader = ProcessedMessage.Find<patch_LogicHashGen>(LogicReaderId);
        SimpleFabricatorBase fabricator = ProcessedMessage.Find<SimpleFabricatorBase>(FabricatorId);
        if (logicReader == null || fabricator == null)
        {
            // One or both objects have not spawned locally yet — hand both ids to
            // WaitUntilFound, which retries Process for up to 10 seconds.
            // NOTE(review): both callbacks passed to WaitUntilFound are Process;
            // presumably one is the found-callback and one the timeout-callback — confirm.
            List<NetworkInstanceId> list = new List<NetworkInstanceId>
            {
                LogicReaderId,
                FabricatorId
            };
            Singleton<GameManager>.Instance.NetworkManager.StartCoroutine(base.WaitUntilFound(Process, Process, list, 10f, "LogicHashGen"));
        }
        else
        {
            logicReader.CurrentFabricator = fabricator;
        }
    }
}
///<summary>Deletes a processed message from the queue.</summary>
///<remarks>This is a non-blocking asynchronous method.</remarks>
public void DeleteMessage(CloudQueueMessage message)
{
    queue.BeginDeleteMessage(message, r =>
    {
        queue.EndDeleteMessage(r);

        //I have no idea whether this is necessary. I'm
        //afraid that the Azure queue might return the
        //same message yet again just after deleting it
        Thread.Sleep(TimeSpan.FromSeconds(8));

        // Remove the corresponding dedup entity from table storage
        // (the one GetMessage inserted when it claimed the message).
        var dataContext = tableClient.GetDataServiceContext();
        var pm = new ProcessedMessage(queue, message);
        dataContext.AttachTo(TableName, pm, etag: "*"); //Since entities will never change, it's not worth storing the eTag.
        dataContext.DeleteObject(pm);
        dataContext.SaveChangesWithRetries();
    }, null);
}
// Decides whether a processed message should be included in the model:
// system messages, messages without an intent, and subscription control
// messages are all skipped.
private static bool CanAccept(ProcessedMessage message)
{
    if (message.IsSystemMessage)
    {
        return false;
    }

    var intent = message.MessageIntent;
    if (string.IsNullOrEmpty(message.MessageIntent))
    {
        return false;
    }

    return intent != "Subscribe" && intent != "Unsubscribe";
}
/// <summary>
/// Run this at the start of processing a message to see if we have already processed it.
/// </summary>
/// <param name="messageId">The message Id.</param>
/// <param name="topic">The topic of the message.</param>
/// <returns>True if the message has already been processed.</returns>
public virtual async Task<bool> HasProcessedMessage(string messageId, Type topic = null)
{
    _messageInfo = $"{topic} - {messageId}";

    if (await _session.HasProcessedMessage(messageId))
    {
        _logger.LogInformation($"Already processed message, Id: {_messageInfo}");
        return true;
    }

    // First sighting: keep the record around so it can be marked processed later.
    _processedMessage = new ProcessedMessage()
    {
        Id = messageId,
        TopicType = topic
    };
    _logger.LogDebug($"New message {_messageInfo}");
    return false;
}
public void Only_processed_messages_are_being_expired()
{
    using (var documentStore = InMemoryStoreBuilder.GetInMemoryStore())
    {
        var expiredDate = DateTime.UtcNow.AddDays(-3);
        var thresholdDate = DateTime.UtcNow.AddDays(-2);
        var recentDate = DateTime.UtcNow.AddDays(-1);

        // Arrange: one message on each side of the expiry threshold.
        var expiredMessage = new ProcessedMessage
        {
            Id = "1",
            ProcessedAt = expiredDate
        };
        using (var session = documentStore.OpenSession())
        {
            session.Store(expiredMessage);
            session.SaveChanges();
        }

        var recentMessage = new ProcessedMessage
        {
            Id = "2",
            ProcessedAt = recentDate
        };
        using (var session = documentStore.OpenSession())
        {
            session.Store(recentMessage);
            session.SaveChanges();
        }

        RunExpiry(documentStore, thresholdDate);

        // Assert: only the message older than the threshold was removed.
        using (var session = documentStore.OpenSession())
        {
            Assert.Null(session.Load<ProcessedMessage>(expiredMessage.Id));
            Assert.NotNull(session.Load<ProcessedMessage>(recentMessage.Id));
        }
    }
}
public void Recent_processed_messages_are_not_being_expired()
{
    using (var documentStore = InMemoryStoreBuilder.GetInMemoryStore())
    {
        var thresholdDate = DateTime.UtcNow.AddDays(-2);
        var recentDate = DateTime.UtcNow.AddDays(-1);

        // Arrange: a message processed after the expiry threshold.
        var recentMessage = new ProcessedMessage
        {
            Id = "1",
            ProcessedAt = recentDate
        };
        using (var session = documentStore.OpenSession())
        {
            session.Store(recentMessage);
            session.SaveChanges();
        }

        RunExpiry(documentStore, thresholdDate);

        // Assert: the recent message survived expiry.
        using (var session = documentStore.OpenSession())
        {
            Assert.AreEqual(1, session.Query<ProcessedMessage>().Count());
        }
    }
}
public void Recent_processed_messages_are_not_being_expired()
{
    // Arrange: a message processed right now — inside the retention window.
    var recentMessage = new ProcessedMessage
    {
        Id = "1",
        ProcessedAt = DateTime.UtcNow,
    };
    using (var session = documentStore.OpenSession())
    {
        session.Store(recentMessage);
        session.SaveChanges();
    }

    // Act: let the expiry timer run through two cycles.
    WaitForIndexing(documentStore);
    Thread.Sleep(Settings.ExpirationProcessTimerInSeconds * 1000 * 2);

    // Assert: the message was not expired.
    using (var session = documentStore.OpenSession())
    {
        Assert.NotNull(session.Load<ProcessedMessage>(recentMessage.Id));
    }
}
// Adds a processed message to the model: records who sent and received it,
// and attaches it to its conversation context when one is present.
public void Accept(ProcessedMessage processedMessage)
{
    if (!CanAccept(processedMessage))
    {
        return;
    }

    var message = GetInterimMessage(processedMessage);

    model.GetEndpoint(processedMessage.SendingEndpoint.Name).Sends(message);
    model.GetEndpoint(processedMessage.ReceivingEndpoint.Name).Receives(message);

    if (!string.IsNullOrEmpty(processedMessage.Context))
    {
        model.GetContext(processedMessage.Context).Contains(message);
    }
}
// Persists an audit record for every successfully processed import.
public void Handle(ImportSuccessfullyProcessedMessage message)
{
    Session.Store(new ProcessedMessage(message));
}
// Marks (or unmarks) a processed message as a system message via its metadata.
public static void MakeSystemMessage(this ProcessedMessage message, bool isSystem = true)
    => message.MessageMetadata["IsSystemMessage"] = isSystem;
// Stores the transport-level message id in the processed message's metadata.
public static void SetMessageId(this ProcessedMessage message, string messageId)
    => message.MessageMetadata["MessageId"] = messageId;
// Projects a processed message into a lightweight record for the database.
public void Handle(ProcessedMessage message)
{
    var data = new MessageData(message.SendingEndpoint, message.ProcessingEndpoint);
    database.Add(data);
}
// Drains the audit queue in transactional batches: each batch is received inside
// one MSMQ transaction and bulk-inserted into Raven. If the queue keeps producing,
// additional importers are spun up. A message that throws is remembered and retried
// individually once this importer winds down.
void BatchImporter()
{
    String failedMessageID = null;
    try
    {
        Logger.DebugFormat("Batch job started", Task.CurrentId);
        var moreMessages = 0;
        using (var queueReceiver = CreateReceiver())
        {
            do
            {
                // Sustained backlog detected — try to start another importer.
                if (moreMessages > RampUpConcurrencyMagicNumber)
                {
                    if (TryStartNewBatchImporter())
                    {
                        Logger.Debug("We have too many messages, starting another batch importer");
                        moreMessages = 0; //Reset to 0 so we only ramp up once per BatchImporter
                    }
                }
                moreMessages++;
                using (var msmqTransaction = new MessageQueueTransaction())
                {
                    msmqTransaction.Begin();
                    // Whole batch goes into Raven via one bulk insert, inside the MSMQ transaction.
                    using (var bulkInsert = store.BulkInsert(options: new BulkInsertOptions { CheckForUpdates = true }))
                    {
                        for (var idx = 0; idx < BatchSize; idx++)
                        {
                            Message message = null;
                            TransportMessage transportMessage;
                            try
                            {
                                message = queueReceiver.Receive(receiveTimeout, msmqTransaction);
                                performanceCounters.MessageDequeued();
                                transportMessage = MsmqUtilities.Convert(message);
                            }
                            catch (MessageQueueException mqe)
                            {
                                // Receive timed out: the queue is empty — end this batch and the loop.
                                if (mqe.MessageQueueErrorCode == MessageQueueErrorCode.IOTimeout)
                                {
                                    moreMessages = 0;
                                    break;
                                }
                                throw;
                            }
                            catch (Exception)
                            {
                                // Remember the offending message so the finally block can retry it individually.
                                if (message != null)
                                {
                                    failedMessageID = message.Id;
                                }
                                throw;
                            }
                            try
                            {
                                // Enrich, store the audit record, and optionally forward the message.
                                var importSuccessfullyProcessedMessage = new ImportSuccessfullyProcessedMessage(transportMessage);
                                foreach (var enricher in enrichers)
                                {
                                    enricher.Enrich(importSuccessfullyProcessedMessage);
                                }
                                var auditMessage = new ProcessedMessage(importSuccessfullyProcessedMessage);
                                bulkInsert.Store(auditMessage);
                                performanceCounters.MessageProcessed();
                                if (Settings.ForwardAuditMessages == true)
                                {
                                    Forwarder.Send(transportMessage, Settings.AuditLogQueue);
                                }
                            }
                            catch (Exception)
                            {
                                if (message != null)
                                {
                                    failedMessageID = message.Id;
                                }
                                throw;
                            }
                        }
                    }
                    msmqTransaction.Commit();
                }
            } while (moreMessages > 0 && !stopping);
        }
        Logger.Debug("Stopping batch importer");
    }
    finally
    {
        if (!String.IsNullOrEmpty(failedMessageID))
        {
            // Call RetryMessageImportById outside the Task as it checks for running tasks
            ThreadPool.QueueUserWorkItem(state => RetryMessageImportById(failedMessageID));
        }
        countDownEvent.Decrement();
    }
}
// Retries the import of a single message that failed during batch processing.
// Drain-stops all batch importers, receives the one message by id inside its own
// MSMQ transaction, and only commits when the message was either imported or
// deliberately shunted to the error queue.
void RetryMessageImportById(string messageID)
{
    // Try to get the batchErrorLock, if we can't then exit,
    // the message will trigger a retry next time on the next batch read.
    // Retrymessage may be fired again for the same message until the batches drain so this
    // prevents the message being processed twice,
    if (Monitor.TryEnter(batchErrorLockObj))
    {
        try
        {
            // Drain: signal all batch importers to stop and wait for them to finish.
            Logger.DebugFormat("Drain stop running batch importers");
            stopping = true;
            var runningTasks = batchTaskTracker.Active();
            Task.WaitAll(runningTasks);
            var commitTransaction = false;
            using (var queueReceiver = CreateReceiver())
            using (var msmqTransaction = new MessageQueueTransaction())
            {
                msmqTransaction.Begin();
                Logger.DebugFormat("Retry import of messageID - {0}", messageID);
                try
                {
                    Message message;
                    TransportMessage transportMessage;
                    try
                    {
                        message = queueReceiver.ReceiveById(messageID);
                        performanceCounters.MessageDequeued();
                    }
                    catch (Exception exception)
                    {
                        importFailuresHandler.FailedToReceive(exception); //logs and increments circuit breaker
                        return;
                    }
                    try
                    {
                        transportMessage = MsmqUtilities.Convert(message);
                    }
                    catch (Exception convertException)
                    {
                        importFailuresHandler.FailedToReceive(convertException); //logs and increments circuit breaker
                        serviceControlErrorQueue.Send(message, msmqTransaction); // Send unconvertable message to SC's ErrorQueue so it's not lost
                        commitTransaction = true; // Can't convert the messsage, so commit to get message out of the queue
                        return;
                    }
                    try
                    {
                        // Enrich and store the audit record in one short-lived session.
                        var importSuccessfullyProcessedMessage = new ImportSuccessfullyProcessedMessage(transportMessage);
                        foreach (var enricher in enrichers)
                        {
                            enricher.Enrich(importSuccessfullyProcessedMessage);
                        }
                        using (var session = store.OpenSession())
                        {
                            var auditMessage = new ProcessedMessage(importSuccessfullyProcessedMessage);
                            session.Store(auditMessage);
                            session.SaveChanges();
                        }
                        performanceCounters.MessageProcessed();
                        if (Settings.ForwardAuditMessages == true)
                        {
                            Forwarder.Send(transportMessage, Settings.AuditLogQueue);
                        }
                        commitTransaction = true;
                    }
                    catch (Exception importException)
                    {
                        importFailuresHandler.Log(transportMessage, importException); //Logs and Writes failure transport message to Raven
                    }
                }
                finally
                {
                    // Only commit (i.e. consume the message) when handled or shunted;
                    // otherwise the rollback returns it to the queue for another attempt.
                    if (commitTransaction)
                    {
                        msmqTransaction.Commit();
                    }
                }
            }
        }
        finally
        {
            Monitor.Exit(batchErrorLockObj);
            //Restart Batch mode
            stopping = false;
            Logger.Debug("Ready to BeginPeek again");
            queuePeeker.BeginPeek();
        }
    }
}
// Retries the import of a single failed message in isolation: drain-stops the
// batch importers, receives the message by id in a dedicated MSMQ transaction,
// and commits only if the message was imported or moved to the error queue.
void RetryMessageImportById(string messageID)
{
    // Try to get the batchErrorLock, if we can't then exit,
    // the message will trigger a retry next time on the next batch read.
    // Retrymessage may be fired again for the same message until the batches drain so this
    // prevents the message being processed twice,
    if (Monitor.TryEnter(batchErrorLockObj))
    {
        try
        {
            // Signal all batch importers to stop and wait for them to drain.
            Logger.DebugFormat("Drain stop running batch importers");
            stopping = true;
            var runningTasks = batchTaskTracker.Active();
            Task.WaitAll(runningTasks);
            var commitTransaction = false;
            using (var queueReceiver = CreateReceiver())
            using (var msmqTransaction = new MessageQueueTransaction())
            {
                msmqTransaction.Begin();
                Logger.DebugFormat("Retry import of messageID - {0}", messageID);
                try
                {
                    Message message;
                    TransportMessage transportMessage;
                    try
                    {
                        message = queueReceiver.ReceiveById(messageID);
                        performanceCounters.MessageDequeued();
                    }
                    catch (Exception exception)
                    {
                        importFailuresHandler.FailedToReceive(exception); //logs and increments circuit breaker
                        return;
                    }
                    try
                    {
                        transportMessage = MsmqUtilities.Convert(message);
                    }
                    catch (Exception convertException)
                    {
                        importFailuresHandler.FailedToReceive(convertException); //logs and increments circuit breaker
                        serviceControlErrorQueue.Send(message, msmqTransaction); // Send unconvertable message to SC's ErrorQueue so it's not lost
                        commitTransaction = true; // Can't convert the messsage, so commit to get message out of the queue
                        return;
                    }
                    try
                    {
                        // Enrich and persist the audit record in a single session.
                        var importSuccessfullyProcessedMessage = new ImportSuccessfullyProcessedMessage(transportMessage);
                        foreach (var enricher in enrichers)
                        {
                            enricher.Enrich(importSuccessfullyProcessedMessage);
                        }
                        using (var session = store.OpenSession())
                        {
                            var auditMessage = new ProcessedMessage(importSuccessfullyProcessedMessage);
                            session.Store(auditMessage);
                            session.SaveChanges();
                        }
                        performanceCounters.MessageProcessed();
                        if (Settings.ForwardAuditMessages == true)
                        {
                            Forwarder.Send(transportMessage, Settings.AuditLogQueue);
                        }
                        commitTransaction = true;
                    }
                    catch (Exception importException)
                    {
                        importFailuresHandler.Log(transportMessage, importException); //Logs and Writes failure transport message to Raven
                    }
                }
                finally
                {
                    // Commit consumes the message; rolling back returns it to the queue.
                    if (commitTransaction)
                    {
                        msmqTransaction.Commit();
                    }
                }
            }
        }
        finally
        {
            Monitor.Exit(batchErrorLockObj);
            //Restart Batch mode
            stopping = false;
            Logger.Debug("Ready to BeginPeek again");
            queuePeeker.BeginPeek();
        }
    }
}
// Pulls messages from the audit queue in transactional batches and bulk-inserts
// them into Raven. Ramps up extra importers under sustained load; a message that
// throws is remembered and retried individually after this importer shuts down.
void BatchImporter()
{
    String failedMessageID = null;
    try
    {
        Logger.DebugFormat("Batch job started", Task.CurrentId);
        var moreMessages = 0;
        using (var queueReceiver = CreateReceiver())
        {
            do
            {
                // Backlog keeps growing — try to start another importer.
                if (moreMessages > RampUpConcurrencyMagicNumber)
                {
                    if (TryStartNewBatchImporter())
                    {
                        Logger.Debug("We have too many messages, starting another batch importer");
                        moreMessages = 0; //Reset to 0 so we only ramp up once per BatchImporter
                    }
                }
                moreMessages++;
                using (var msmqTransaction = new MessageQueueTransaction())
                {
                    msmqTransaction.Begin();
                    // The whole batch is written through one Raven bulk insert inside the MSMQ transaction.
                    using (var bulkInsert = store.BulkInsert(options: new BulkInsertOptions { CheckForUpdates = true }))
                    {
                        for (var idx = 0; idx < BatchSize; idx++)
                        {
                            Message message = null;
                            TransportMessage transportMessage;
                            try
                            {
                                message = queueReceiver.Receive(receiveTimeout, msmqTransaction);
                                performanceCounters.MessageDequeued();
                                transportMessage = MsmqUtilities.Convert(message);
                            }
                            catch (MessageQueueException mqe)
                            {
                                // Receive timed out: queue drained — finish this batch and the loop.
                                if (mqe.MessageQueueErrorCode == MessageQueueErrorCode.IOTimeout)
                                {
                                    moreMessages = 0;
                                    break;
                                }
                                throw;
                            }
                            catch (Exception)
                            {
                                // Record the failing message id for the individual retry in finally.
                                if (message != null)
                                {
                                    failedMessageID = message.Id;
                                }
                                throw;
                            }
                            try
                            {
                                // Enrich, store the audit record, and optionally forward the message.
                                var importSuccessfullyProcessedMessage = new ImportSuccessfullyProcessedMessage(transportMessage);
                                foreach (var enricher in enrichers)
                                {
                                    enricher.Enrich(importSuccessfullyProcessedMessage);
                                }
                                var auditMessage = new ProcessedMessage(importSuccessfullyProcessedMessage);
                                bulkInsert.Store(auditMessage);
                                performanceCounters.MessageProcessed();
                                if (Settings.ForwardAuditMessages == true)
                                {
                                    Forwarder.Send(transportMessage, Settings.AuditLogQueue);
                                }
                            }
                            catch (Exception)
                            {
                                if (message != null)
                                {
                                    failedMessageID = message.Id;
                                }
                                throw;
                            }
                        }
                    }
                    msmqTransaction.Commit();
                }
            } while (moreMessages > 0 && !stopping);
        }
        Logger.Debug("Stopping batch importer");
    }
    finally
    {
        if (!String.IsNullOrEmpty(failedMessageID))
        {
            // Call RetryMessageImportById outside the Task as it checks for running tasks
            ThreadPool.QueueUserWorkItem(state => RetryMessageImportById(failedMessageID));
        }
        countDownEvent.Decrement();
    }
}
public void Stored_bodies_are_being_removed_when_message_expires()
{
    // Arrange: an already-expired audit message plus its stored body.
    var messageId = "21";
    var bodyStorage = new RavenAttachmentsBodyStorage
    {
        DocumentStore = documentStore
    };
    var expiredMessage = new ProcessedMessage
    {
        Id = "1",
        ProcessedAt = DateTime.UtcNow.AddHours(-(Settings.HoursToKeepMessagesBeforeExpiring * 2))
    };
    expiredMessage.SetMessageId(messageId);

    using (var session = documentStore.OpenSession())
    {
        session.Store(expiredMessage);
        session.SaveChanges();
    }

    var body = new byte[] { 1, 2, 3, 4, 5 };
    using (var stream = new MemoryStream(body))
    {
        bodyStorage.Store(messageId, "binary", 5, stream);
    }

    // Act: let the expiry process run through two timer cycles.
    documentStore.WaitForIndexing();
    Thread.Sleep(Settings.ExpirationProcessTimerInSeconds * 1000 * 2);

    // Assert: the audit document is gone...
    using (var session = documentStore.OpenSession())
    {
        Assert.Null(session.Load<ProcessedMessage>(expiredMessage.Id), "Audit document should be deleted");
    }

    // ...and so is its stored body.
    Stream dummy;
    Assert.False(bodyStorage.TryFetch(messageId, out dummy), "Audit document body should be deleted");
}