/// <summary>
/// Stores the message body in body storage when appropriate and records the
/// resulting "BodyUrl" (and possibly an inline "Body") in the metadata.
/// Returns true when the body was written to body storage.
/// </summary>
bool TryStoreBody(ImportMessage message, int bodySize, string contentType)
{
    var bodyId = message.MessageId;
    var bodyUrl = string.Format("/messages/{0}/body", bodyId);
    var storedInBodyStorage = false;

    var failedMessage = message is ImportFailedMessage;
    var binaryContent = contentType.Contains("binary");
    var withinMaxSize = bodySize <= Settings.MaxBodySizeToStore;
    var belowLohThreshold = bodySize < LargeObjectHeapThreshold;

    // Failed messages always keep a copy in body storage (regardless of size);
    // other messages only when they fit under the configured maximum.
    if (failedMessage || withinMaxSize)
    {
        bodyUrl = StoreBodyInBodyStorage(message, bodyId, contentType, bodySize);
        storedInBodyStorage = true;
    }

    // Inline the body into metadata only for small, non-binary payloads that
    // will not end up on the large object heap.
    if (withinMaxSize && belowLohThreshold && !binaryContent)
    {
        message.Metadata.Add("Body", Encoding.UTF8.GetString(message.PhysicalMessage.Body));
    }

    message.Metadata.Add("BodyUrl", bodyUrl);

    return storedInBodyStorage;
}
/// <summary>
/// Length of the physical message body in bytes; a missing body counts as zero.
/// </summary>
static int GetContentLength(ImportMessage message)
{
    var body = message.PhysicalMessage.Body;
    return body == null ? 0 : body.Length;
}
/// <summary>
/// Writes the message body to body storage and returns the URL it was stored under.
/// </summary>
string StoreBodyInBodyStorage(ImportMessage message, string bodyId, string contentType, int bodySize)
{
    // Wrap the existing byte buffer in a stream for the storage API; the
    // MemoryStream is disposed as soon as the store call completes.
    using (var stream = new MemoryStream(message.PhysicalMessage.Body))
    {
        return BodyStorage.Store(bodyId, contentType, bodySize, stream);
    }
}
/// <summary>
/// Derives timing metadata (TimeSent, CriticalTime, ProcessingTime,
/// DeliveryTime) from the processing-related headers.
/// DateTime.MinValue is used as the "header not present" sentinel.
/// </summary>
public override void Enrich(ImportMessage message)
{
    var timeSent = DateTime.MinValue;
    var processingStarted = DateTime.MinValue;
    var processingEnded = DateTime.MinValue;

    string headerValue;

    if (message.PhysicalMessage.Headers.TryGetValue(Headers.TimeSent, out headerValue))
    {
        timeSent = DateTimeExtensions.ToUtcDateTime(headerValue);
        message.Metadata.Add("TimeSent", timeSent);
    }

    if (message.PhysicalMessage.Headers.TryGetValue(Headers.ProcessingStarted, out headerValue))
    {
        processingStarted = DateTimeExtensions.ToUtcDateTime(headerValue);
    }

    if (message.PhysicalMessage.Headers.TryGetValue(Headers.ProcessingEnded, out headerValue))
    {
        processingEnded = DateTimeExtensions.ToUtcDateTime(headerValue);
    }

    // Each duration falls back to TimeSpan.Zero when either endpoint
    // timestamp is missing.
    var criticalTime = TimeSpan.Zero;
    if (processingEnded != DateTime.MinValue && timeSent != DateTime.MinValue)
    {
        criticalTime = processingEnded - timeSent;
    }
    message.Metadata.Add("CriticalTime", criticalTime);

    var processingTime = TimeSpan.Zero;
    if (processingEnded != DateTime.MinValue && processingStarted != DateTime.MinValue)
    {
        processingTime = processingEnded - processingStarted;
    }
    message.Metadata.Add("ProcessingTime", processingTime);

    var deliveryTime = TimeSpan.Zero;
    if (processingStarted != DateTime.MinValue && timeSent != DateTime.MinValue)
    {
        deliveryTime = processingStarted - timeSent;
    }
    message.Metadata.Add("DeliveryTime", deliveryTime);
}
/// <summary>
/// Content type from the ContentType header, or <paramref name="defaultContentType"/>
/// when the header is absent.
/// </summary>
static string GetContentType(ImportMessage message, string defaultContentType)
{
    string contentType;
    return message.PhysicalMessage.Headers.TryGetValue(Headers.ContentType, out contentType)
        ? contentType
        : defaultContentType;
}
/// <summary>
/// Records the license status reported by the message's headers against the
/// receiving endpoint.
/// </summary>
public override void Enrich(ImportMessage message)
{
    var headers = message.PhysicalMessage.Headers;
    var status = GetLicenseStatus(headers);
    var endpoint = EndpointDetailsParser.ReceivingEndpoint(headers);

    // Messages from v3.3.x endpoints that were successfully processed carry
    // none of the relevant headers, so the parser returns null for them.
    if (endpoint == null)
    {
        return;
    }

    LicenseStatusKeeper.Set(endpoint.Name + endpoint.Host, status);
}
/// <summary>
/// Copies the conversation id and related-to id headers, when present, into
/// the message metadata.
/// </summary>
public override void Enrich(ImportMessage message)
{
    var headers = message.PhysicalMessage.Headers;

    string headerValue;
    if (headers.TryGetValue(NServiceBus.Headers.ConversationId, out headerValue))
    {
        message.Metadata.Add("ConversationId", headerValue);
    }

    if (headers.TryGetValue(NServiceBus.Headers.RelatedTo, out headerValue))
    {
        message.Metadata.Add("RelatedToId", headerValue);
    }
}
/// <summary>
/// Records body-related metadata (ContentLength, ContentType) and attempts to
/// store the body, flagging "BodyNotStored" when storage was skipped.
/// </summary>
public override void Enrich(ImportMessage message)
{
    var bodySize = GetContentLength(message);
    message.Metadata.Add("ContentLength", bodySize);

    // Empty bodies have nothing further to record or store.
    if (bodySize == 0)
    {
        return;
    }

    var contentType = GetContentType(message, "text/xml");
    message.Metadata.Add("ContentType", contentType);

    if (!TryStoreBody(message, bodySize, contentType))
    {
        message.Metadata.Add("BodyNotStored", true);
    }
}
/// <summary>
/// Classifies the message: control messages are tagged as system messages,
/// otherwise the type is derived from the EnclosedMessageTypes header.
/// </summary>
public override void Enrich(ImportMessage message)
{
    // Control messages are always system messages. Return early: a control
    // message that also carried the EnclosedMessageTypes header would
    // otherwise call Metadata.Add("IsSystemMessage", ...) and
    // Metadata.Add("MessageType", ...) a second time, which throws on a
    // duplicate key for dictionary-backed metadata.
    if (message.PhysicalMessage.Headers.ContainsKey(NServiceBus.Headers.ControlMessageHeader))
    {
        message.Metadata.Add("IsSystemMessage", true);
        message.Metadata.Add("MessageType", "SystemMessage");
        return;
    }

    string enclosedMessageTypes;
    if (message.PhysicalMessage.Headers.TryGetValue(NServiceBus.Headers.EnclosedMessageTypes, out enclosedMessageTypes))
    {
        var messageType = GetMessageType(enclosedMessageTypes);
        message.Metadata.Add("IsSystemMessage", DetectSystemMessage(messageType));
        message.Metadata.Add("MessageType", messageType);
        // Namespace/nested-type separators become spaces so the type name is
        // tokenized for full-text search.
        message.Metadata.Add("SearchableMessageType", messageType.Replace(".", " ").Replace("+", " "));
    }
}
/// <summary>
/// Records body metadata and stores the body: failed messages get a copy in
/// body storage, others get a URL pointing at the messages API; small
/// non-binary bodies are additionally inlined into the metadata.
/// </summary>
public override void Enrich(ImportMessage message)
{
    var body = message.PhysicalMessage.Body;

    if (body == null || body.Length == 0)
    {
        message.Metadata.Add("ContentLength", 0);
        return;
    }

    string contentType;
    if (!message.PhysicalMessage.Headers.TryGetValue(Headers.ContentType, out contentType))
    {
        contentType = "text/xml"; //default to xml for now
    }
    message.Metadata.Add("ContentType", contentType);

    var bodySize = body.Length;
    var bodyId = message.MessageId;

    if (message is ImportFailedMessage)
    {
        // Failed messages keep a full copy in body storage.
        using (var bodyStream = new MemoryStream(body))
        {
            message.Metadata.Add("BodyUrl", BodyStorage.Store(bodyId, contentType, bodySize, bodyStream));
        }
    }
    else
    {
        // Successful messages only get a pointer into the messages API.
        message.Metadata.Add("BodyUrl", string.Format("/messages/{0}/body", bodyId));
    }

    // Inline small, non-binary payloads for quick access.
    if (!contentType.Contains("binary") && bodySize <= MaxBodySizeToStore)
    {
        message.Metadata.Add("Body", System.Text.Encoding.UTF8.GetString(body));
    }

    message.Metadata.Add("ContentLength", bodySize);
}
/// <summary>
/// Marks successfully processed messages that came through a retry and
/// notifies the retry tracker via a local command.
/// </summary>
public override void Enrich(ImportMessage message)
{
    // Only successfully processed messages can complete a retry.
    if (!(message is ImportSuccessfullyProcessedMessage))
    {
        return;
    }

    string retryId;
    var wasRetried = message.PhysicalMessage.Headers.TryGetValue("ServiceControl.RetryId", out retryId);
    message.Metadata.Add("IsRetried", wasRetried);

    if (wasRetried)
    {
        Bus.SendLocal(new RegisterSuccessfulRetry
        {
            FailedMessageId = message.UniqueMessageId,
            RetryId = Guid.Parse(retryId)
        });
    }
}
/// <summary>
/// Creates an import message with the given severity, optionally linked to the
/// affected row/field, appends it to Messages, and returns it so the caller
/// can adjust it further.
/// </summary>
public ImportMessage AddMessage(string message, ImportMessageType severity, ImportRowInfo affectedRow = null, string affectedField = null)
{
    var entry = new ImportMessage(message, severity)
    {
        AffectedItem = affectedRow,
        AffectedField = affectedField
    };
    this.Messages.Add(entry);
    return entry;
}
/// <summary>
/// Adds enricher-specific metadata to the given imported message.
/// Implementations in this file typically read values from the physical
/// message headers and write derived entries into the message's Metadata.
/// </summary>
/// <param name="message">The imported message to enrich.</param>
public abstract void Enrich(ImportMessage message);
/// <summary>
/// Extracts saga participation metadata ("InvokedSagas", "OriginatesFromSaga")
/// from the saga-related headers.
/// </summary>
public override void Enrich(ImportMessage message)
{
    string sagasInvokedRaw;
    if (message.PhysicalMessage.Headers.TryGetValue("NServiceBus.InvokedSagas", out sagasInvokedRaw))
    {
        string sagasChangeRaw;
        // Saga id -> change status. The lookup below is by saga id, so each
        // "ServiceControl.SagaStateChange" entry is assumed to be
        // "sagaId:changeStatus", ';'-separated — TODO confirm against sender.
        var sagasChanges = new Dictionary<string, string>();
        if (message.PhysicalMessage.Headers.TryGetValue("ServiceControl.SagaStateChange", out sagasChangeRaw))
        {
            var multiSagaChanges = sagasChangeRaw.Split(';');
            foreach (var part in multiSagaChanges.Select(s => s.Split(':')))
            {
                // NOTE(review): throws on a duplicate saga id or a malformed
                // entry without ':' — relies on well-formed headers.
                sagasChanges.Add(part[0], part[1]);
            }
        }
        // Each invoked-saga entry is "SagaType:SagaId" (index 0 = type,
        // index 1 = id parsed as a Guid), ';'-separated.
        var sagas = sagasInvokedRaw.Split(';')
            .Select(saga =>
            {
                var sagaInvoked = saga.Split(':');
                string changeText;
                // Missing change entry leaves ChangeStatus null.
                sagasChanges.TryGetValue(sagaInvoked[1], out changeText);
                return new SagaInfo
                {
                    SagaId = Guid.Parse(sagaInvoked[1]),
                    SagaType = sagaInvoked[0],
                    ChangeStatus = changeText,
                };
            })
            .ToList();
        message.Metadata.Add("InvokedSagas", sagas);
    }
    else
    {
        string sagaId;
        // For backwards compatibility: older endpoints only send SagaId/SagaType
        // headers, not the consolidated InvokedSagas header.
        if (message.PhysicalMessage.Headers.TryGetValue(NServiceBus.Headers.SagaId, out sagaId))
        {
            // SagaType header holds an assembly-qualified name; keep only the type part.
            var sagaType = message.PhysicalMessage.Headers[NServiceBus.Headers.SagaType].Split(',').First();
            message.Metadata.Add("InvokedSagas", new List<SagaInfo>{new SagaInfo{SagaId = Guid.Parse(sagaId),SagaType =sagaType}});
        }
    }
    string originatingSagaId;
    if (message.PhysicalMessage.Headers.TryGetValue(NServiceBus.Headers.OriginatingSagaId, out originatingSagaId))
    {
        // NOTE(review): assumes OriginatingSagaType is always present when
        // OriginatingSagaId is — a direct indexer access would throw otherwise.
        var sagaType = message.PhysicalMessage.Headers[NServiceBus.Headers.OriginatingSagaType].Split(',').First();
        message.Metadata.Add("OriginatesFromSaga", new SagaInfo { SagaId = Guid.Parse(originatingSagaId), SagaType = sagaType });
    }
}
/// <summary>
/// Queue-triggered entry point that advances a data-file import through its
/// staged pipeline: reads the current import status, then cancels, processes
/// the current stage, or re-queues the message for the next stage.
/// Any failure marks the whole import as failed with the root-cause error.
/// </summary>
/// <remarks>
/// Changed from <c>async void</c> to <c>async Task</c>: with <c>async void</c>
/// the Functions host cannot observe completion or exceptions, so an unhandled
/// failure can crash the worker process and the host may tear down before the
/// method finishes. Returning <c>Task</c> is fully compatible with the
/// Functions runtime binding.
/// </remarks>
public async Task ProcessUploads(
    [QueueTrigger("imports-pending")] ImportMessage message,
    ExecutionContext executionContext,
    [Queue("imports-pending")] ICollector<ImportMessage> importStagesMessageQueue,
    [Queue("imports-available")] ICollector<ImportObservationsMessage> importObservationsMessageQueue)
{
    try
    {
        var status = await _importStatusService.GetImportStatus(message.Release.Id, message.DataFileName);

        _logger.LogInformation($"Processor Function processing import message for " +
                               $"{message.DataFileName} at stage {status.Status}");

        switch (status.Status)
        {
            case IStatus.CANCELLING:
                _logger.LogInformation($"Import for {message.DataFileName} is in the process of being " +
                                       $"cancelled, so not processing to the next import stage - marking as " +
                                       $"CANCELLED");
                await _importStatusService.UpdateStatus(message.Release.Id,
                    message.DataFileName,
                    IStatus.CANCELLED, 100);
                break;
            case IStatus.CANCELLED:
                _logger.LogInformation($"Import for {message.DataFileName} is cancelled, so not " +
                                       $"processing any further");
                break;
            case IStatus.QUEUED:
            case IStatus.PROCESSING_ARCHIVE_FILE:
            {
                // Archive uploads are unpacked before stage 1 begins.
                if (message.ArchiveFileName != "")
                {
                    _logger.LogInformation($"Unpacking archive for {message.DataFileName}");
                    await _processorService.ProcessUnpackingArchive(message);
                }

                await _importStatusService.UpdateStatus(message.Release.Id,
                    message.DataFileName,
                    IStatus.STAGE_1);
                importStagesMessageQueue.Add(message);
                break;
            }
            case IStatus.STAGE_1:
                await _processorService.ProcessStage1(message, executionContext);
                await _importStatusService.UpdateStatus(message.Release.Id,
                    message.DataFileName,
                    IStatus.STAGE_2);
                importStagesMessageQueue.Add(message);
                break;
            case IStatus.STAGE_2:
                await _processorService.ProcessStage2(message);
                await _importStatusService.UpdateStatus(message.Release.Id,
                    message.DataFileName,
                    IStatus.STAGE_3);
                importStagesMessageQueue.Add(message);
                break;
            case IStatus.STAGE_3:
                await _processorService.ProcessStage3(message);
                await _importStatusService.UpdateStatus(message.Release.Id,
                    message.DataFileName,
                    IStatus.STAGE_4);
                importStagesMessageQueue.Add(message);
                break;
            case IStatus.STAGE_4:
                // Final stage enqueues observation batches rather than re-queueing itself.
                await _processorService.ProcessStage4Messages(message, importObservationsMessageQueue);
                break;
        }
    }
    catch (Exception e)
    {
        // Surface the root cause rather than the wrapper, then fail the whole
        // import so the message is not retried indefinitely.
        var ex = GetInnerException(e);
        await _batchService.FailImport(message.Release.Id,
            message.DataFileName,
            new List<ValidationError>
            {
                new ValidationError(ex.Message)
            });
        _logger.LogError(ex, $"{GetType().Name} function FAILED for : Datafile: " +
                             $"{message.DataFileName} : {ex.Message}");
        _logger.LogError(ex.StackTrace);
    }
}
/// <summary>
/// Reads the ABAP source of every row marked "Select" in <c>dt</c> from the
/// SAP system, optionally saves the collected code to the database, stamps the
/// grid rows with the generated code ids, and reports progress through
/// <paramref name="bw"/>.
/// </summary>
/// <param name="bw">Background worker used for progress reporting.</param>
/// <param name="Saved">When true, persists each batch via db.SaveCodeList.</param>
/// <returns>Always true.</returns>
private bool LoadObjectFromSap(BackgroundWorker bw, bool Saved = true)
{
    List<Code> list = new List<Code>();
    // Maps a dt row index to the Code built from it, so the row can be stamped
    // with the generated CodeId after a save.
    Dictionary<int, Code> pos = new Dictionary<int, Code>();
    this.AbapCode = new SAPINT.Utils.ABAPCode(SapSysName);
    var itemCount = 0;
    var processed = 0;
    var m_isCacelling = false; // NOTE(review): typo — presumably "m_isCancelling"
    foreach (DataRow item in dt.Rows)
    {
        // Cancellation is latched here but only acted on after the current
        // item has been read and saved (see the break at the bottom).
        if (this.backgroundWorker1.CancellationPending)
        {
            m_isCacelling = true;
        }
        if (String.IsNullOrWhiteSpace(item["Select"].ToString()))
        {
            continue;
        }
        var isSelect = (bool)item["Select"];
        if (isSelect)
        {
            var obj = item["NAME"].ToString();
            if (String.IsNullOrEmpty(obj))
            {
                continue;
            }
            // Pull the program's source lines from SAP and mirror line count,
            // size and content back into the grid row.
            var codes = AbapCode.GetSourceCode(obj);
            var sb = new StringBuilder();
            codes.ForEach(x => sb.AppendLine(x));
            item["Lines"] = codes.Count;
            var abapCode = sb.ToString();
            item["Length"] = Util.FileUtil.FormatFileSize(abapCode.Length);
            item["Header"] = abapCode;
            if (String.IsNullOrEmpty(abapCode))
            {
                continue;
            }
            // Collect "*" comment lines as a description.
            // NOTE(review): the guard counts CHARACTERS accumulated so far
            // (ToString().ToList().Count), not comment lines — this stops after
            // ~10 characters, which looks unintended; confirm whether a line
            // limit was meant.
            var comments = new StringBuilder();
            foreach (var comment in codes)
            {
                if (comment.StartsWith("*"))
                {
                    comments.AppendLine(comment);
                }
                if (comments.ToString().ToList().Count > 10)
                {
                    break;
                }
            }
            Code code = new Code();
            code.Content = abapCode;
            code.Desc = comments.ToString();
            code.Title = obj;
            if (!String.IsNullOrEmpty(this.TreeId))
            {
                code.TreeId = TreeId;
            }
            list.Add(code);
            var index = int.Parse(item["Index"].ToString());
            pos.Add(index, code);
            // TimeSpan(long) interprets pauseTime as ticks (100 ns units) —
            // confirm pauseTime is expressed in ticks, not milliseconds.
            var pTime = new System.TimeSpan(pauseTime);
            // Must pause: reading program source from the SAP system is not instantaneous.
            Thread.Sleep(pTime);
            // Save once every 100 items.
            itemCount++;
            if (itemCount == 100 && dt.Rows.Count >= 100 && m_isCacelling == false)
            {
                if (true == Saved)
                {
                    // Disable the data-source combo while the save runs.
                    this.cbxDbSources.Enabled = false;
                    db.SaveCodeList(list);
                    this.cbxDbSources.Enabled = true;
                }
                // Write the generated ids back and unselect the saved rows.
                foreach (var positem in pos)
                {
                    dt.Rows[positem.Key]["CodeId"] = positem.Value.Id;
                    dt.Rows[positem.Key]["Select"] = false;
                }
                pos.Clear();
                list.Clear();
                itemCount = 0;
            }
            else
            {
                // NOTE(review): this branch duplicates the batch save above and
                // runs for every item that has not yet reached the 100-item
                // threshold, which defeats the batching (each item is saved
                // immediately). Verify whether this save was meant to happen
                // only on cancellation / as a final flush.
                if (true == Saved)
                {
                    this.cbxDbSources.Enabled = false;
                    db.SaveCodeList(list);
                    this.cbxDbSources.Enabled = true;
                }
                foreach (var positem in pos)
                {
                    dt.Rows[positem.Key]["CodeId"] = positem.Value.Id;
                    dt.Rows[positem.Key]["Select"] = false;
                }
                pos.Clear();
                list.Clear();
                itemCount = 0;
            }
            processed++;
            ImportMessage m = new ImportMessage() { current = processed, total = selectedItems, progName = obj };
            bw.ReportProgress(processed, m);
            // If cancellation was requested, exit after the save above.
            if (m_isCacelling == true)
            {
                break;
            }
        }
    }
    return true;
    //if (db.SaveCodeList(list))
    //{
    //    foreach (var item in pos)
    //    {
    //        dt.Rows[item.Key]["CodeId"] = item.Value.Id;
    //    }
    //    return true;
    //}
    //else
    //{
    //    return false;
    //}
}