/// <summary>
/// Handles one delivered message: parses its <c>MessageHeader</c>, then either acks it
/// (when <c>AcceptNext</c> is set) or nacks it with an error. Messages whose headers
/// cannot be parsed are discarded. The last parsed header/args are recorded for inspection.
/// </summary>
/// <param name="ea">Delivery event args from the RabbitMQ client.</param>
public override void ProcessMessage(BasicDeliverEventArgs ea)
{
    // Default to UTF-8 unless the producer declared a specific content encoding
    var encoding = Encoding.UTF8;
    MessageHeader header;

    try
    {
        var contentEncoding = ea.BasicProperties.ContentEncoding;
        if (contentEncoding != null)
            encoding = Encoding.GetEncoding(contentEncoding);

        header = new MessageHeader(ea.BasicProperties.Headers, encoding);
        header.Log(Logger, NLog.LogLevel.Trace, "Received");
    }
    catch (Exception e)
    {
        // Unparseable header: drop the message rather than poison the queue
        Logger.Error("Message header content was null, or could not be parsed into a MessageHeader object: " + e);
        DiscardSingleMessage(ea.DeliveryTag);
        return;
    }

    // Record the most recent delivery so callers can inspect it
    LastHeader = header;
    LastArgs = ea;

    if (!AcceptNext)
    {
        ErrorAndNack(header, ea.DeliveryTag, "Message rejected!", null);
        return;
    }

    // Accept exactly one message, then revert to rejecting
    Ack(header, ea.DeliveryTag);
    AcceptNext = false;
}
/// <summary>
/// Default message-processing pipeline: skips work if the consumer is exiting, verifies a
/// channel has been set, parses the <c>MessageHeader</c> (discarding unparseable messages),
/// deserializes the body to <typeparamref name="TMessage"/>, and hands it to
/// <c>ProcessMessageImpl</c>. Any unhandled exception from the implementation is fatal.
/// </summary>
/// <param name="deliverArgs">Delivery event args from the RabbitMQ client.</param>
/// <exception cref="InvalidOperationException">Thrown when no Model (channel) has been set.</exception>
public virtual void ProcessMessage(BasicDeliverEventArgs deliverArgs)
{
    lock (_oConsumeLock)
    {
        if (_exiting)
        {
            return;
        }
    }

    // Handled by RabbitMQ adapter in normal operation - only an issue in testing I think.
    // Throwing NullReferenceException directly is an anti-pattern (CA2201: it is reserved
    // for the runtime); InvalidOperationException is the correct signal that the object is
    // not in a valid state for this call.
    if (Model == null)
    {
        throw new InvalidOperationException("Model not set - use SetModel before processing messages");
    }

    // If we did not receive a valid header, ditch the message and continue.
    // Control messages (no header) are handled in their own ProcessMessage implementation
    Encoding enc = Encoding.UTF8;
    MessageHeader header;

    try
    {
        if (deliverArgs.BasicProperties.ContentEncoding != null)
        {
            enc = Encoding.GetEncoding(deliverArgs.BasicProperties.ContentEncoding);
        }

        header = new MessageHeader(deliverArgs.BasicProperties.Headers, enc);
        header.Log(Logger, LogLevel.Trace, "Received");
    }
    catch (Exception e)
    {
        Logger.Error("Message header content was null, or could not be parsed into a MessageHeader object: " + e);
        DiscardSingleMessage(deliverArgs.DeliveryTag);
        return;
    }

    // Now pass the message on to the implementation, catching and calling Fatal on any unhandled exception
    try
    {
        if (!SafeDeserializeToMessage<TMessage>(header, deliverArgs, out TMessage message))
        {
            return;
        }

        ProcessMessageImpl(header, message, deliverArgs.DeliveryTag);
    }
    catch (Exception e)
    {
        Fatal("ProcessMessageImpl threw unhandled exception", e);
    }
}
/// <summary>
/// Dead-letter handler: parses the message header, validates its x-death history, and either
/// persists the message back to the store for retry or sends it to the graveyard (invalid
/// x-death entries, retry limit exceeded, or persistence failure). Every message that reaches
/// the header-parse stage successfully is acked exactly once.
/// </summary>
/// <param name="deliverArgs">Delivery event args from the RabbitMQ client.</param>
public override void ProcessMessage(BasicDeliverEventArgs deliverArgs)
{
    // Default to UTF-8 unless the producer declared a specific content encoding
    var encoding = Encoding.UTF8;
    MessageHeader header;

    try
    {
        var contentEncoding = deliverArgs.BasicProperties.ContentEncoding;
        if (contentEncoding != null)
            encoding = Encoding.GetEncoding(contentEncoding);

        header = new MessageHeader(deliverArgs.BasicProperties.Headers, encoding);
        header.Log(Logger, NLog.LogLevel.Trace, "Received");
    }
    catch (Exception e)
    {
        // Unparseable header: drop the message rather than poison the queue
        Logger.Error("Message header content was null, or could not be parsed into a MessageHeader object: " + e);
        DiscardSingleMessage(deliverArgs.DeliveryTag);
        return;
    }

    //Bug: RabbitMQ lib doesn't properly handle the ReplyTo address being null, causing the mapping to MongoDB types to throw an exception
    deliverArgs.BasicProperties.ReplyTo ??= "";

    RabbitMqXDeathHeaders deathHeaders;

    try
    {
        deathHeaders = new RabbitMqXDeathHeaders(deliverArgs.BasicProperties.Headers, Encoding.UTF8);
    }
    catch (ArgumentException)
    {
        // Malformed x-death data: bury the message and ack so it is not redelivered
        _deadLetterStore.SendToGraveyard(deliverArgs, header, "Message contained invalid x-death entries");
        Ack(header, deliverArgs.DeliveryTag);
        return;
    }

    // NOTE(review): assumes the parsed headers contain at least one x-death entry -
    // presumably guaranteed by the RabbitMqXDeathHeaders constructor; verify.
    var retriesSoFar = deathHeaders.XDeaths[0].Count - 1;

    if (retriesSoFar >= _maxRetryLimit)
    {
        _deadLetterStore.SendToGraveyard(deliverArgs, header, "MaxRetryCount exceeded");
        Ack(header, deliverArgs.DeliveryTag);
        return;
    }

    // Persist for a later retry; if that fails, the graveyard is the fallback.
    // The message is acked either way - it never returns to the source queue here.
    try
    {
        _deadLetterStore.PersistMessageToStore(deliverArgs, header, _defaultRetryAfter);
    }
    catch (Exception e)
    {
        _deadLetterStore.SendToGraveyard(deliverArgs, header, "Exception when storing message", e);
    }

    Ack(header, deliverArgs.DeliveryTag);
}
/// <summary>
/// Converts a <c>DicomFileMessage</c> into a BSON document (header + dataset), enqueues it
/// for writing to MongoDB, and triggers <c>ProcessQueue</c> once the queue reaches
/// <c>MaxQueueSize</c>. No-ops while the processor is stopping.
/// </summary>
/// <param name="message">The message whose JSON dataset will be converted and stored.</param>
/// <param name="header">The incoming message header; a new header is derived from it for logging/storage.</param>
/// <param name="deliveryTag">RabbitMQ delivery tag kept alongside the document for later ack.</param>
/// <exception cref="ApplicationException">
/// Thrown when the Model is unset, the dataset cannot be deserialized/converted, or the
/// resulting document exceeds <c>MaxDocumentSize</c>.
/// </exception>
public override void AddToWriteQueue(DicomFileMessage message, IMessageHeader header, ulong deliveryTag)
{
    // Only time we are not processing is if we are shutting down anyway
    if (IsStopping)
        return;

    if (Model == null)
        throw new ApplicationException("Model needs to be set before messages can be processed");

    DicomDataset dataset;

    try
    {
        dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset);
    }
    catch (Exception e)
    {
        throw new ApplicationException("Could not deserialize json to dataset", e);
    }

    BsonDocument datasetDoc;

    try
    {
        datasetDoc = DicomTypeTranslaterReader.BuildBsonDocument(dataset);
    }
    catch (Exception e)
    {
        throw new ApplicationException("Exception converting dataset to BsonDocument", e);
    }

    // Generate a new header to record the current service before storing in MongoDB
    var newHeader = new MessageHeader(header);
    newHeader.Log(Logger, LogLevel.Trace, MongoLogMessage);

    var headerDoc = MongoDocumentHeaders.ImageDocumentHeader(message, newHeader);

    var document = new BsonDocument()
        .Add("header", headerDoc)
        .AddRange(datasetDoc);

    // Reject documents MongoDB would refuse anyway
    var docByteLength = document.ToBson().Length;

    if (docByteLength > MaxDocumentSize)
        throw new ApplicationException($"BsonDocument was larger than the max allowed size (have {docByteLength}, max is {MaxDocumentSize})");

    bool flushNow;

    lock (LockObj)
    {
        ToProcess.Enqueue(new Tuple<BsonDocument, ulong>(document, deliveryTag));
        flushNow = ToProcess.Count >= MaxQueueSize;
    }

    if (!flushNow)
        return;

    Logger.Debug("ImageMessageProcessor: Max queue size reached, calling ProcessQueue");
    ProcessQueue();
}