/// <summary>
/// Builds a WPF <see cref="Table"/> from a table template: one header row built
/// from each column's TableHeader, then one data row per item resolved from the
/// template's data binding.
/// </summary>
/// <param name="tableTemplate">Template describing the table's styling and columns.</param>
/// <param name="modelProcessor">Resolves binding expressions against the data model.</param>
/// <param name="model">Root data model the table binding is evaluated against.</param>
/// <returns>The populated table.</returns>
private Table CreateTableTemplateItem(TemplateItem tableTemplate, IModelProcessor modelProcessor, object model)
{
    var table = new Table
    {
        Margin = tableTemplate.Margin,
        Padding = tableTemplate.Padding
    };

    if (!string.IsNullOrWhiteSpace(tableTemplate.FontFamily))
    {
        table.FontFamily = new Typeface(tableTemplate.FontFamily).FontFamily;
    }
    if (tableTemplate.Size > 0)
    {
        table.FontSize = tableTemplate.Size;
    }
    SetControlFontStyle(tableTemplate.FontStyle, table);
    SetControlFontWeight(tableTemplate.FontWeight, table);

    // Resolve the row contexts up front (same evaluation order as before).
    var rowContexts = modelProcessor.GetItems(tableTemplate.BindingData, model);

    // Local helper: host an inline in a paragraph inside a cell, honoring the
    // column span. A null inline yields an empty cell (header not defined).
    TableCell MakeCell(Inline inline, int columnSpan)
    {
        var cell = new TableCell();
        if (inline != null)
        {
            var paragraph = new Paragraph();
            paragraph.Inlines.Add(inline);
            cell.Blocks.Add(paragraph);
        }
        if (columnSpan > 0)
        {
            cell.ColumnSpan = columnSpan;
        }
        return cell;
    }

    var rowGroup = new TableRowGroup();

    // Header row: one cell per column template.
    var headerRow = new TableRow();
    foreach (var column in tableTemplate.Children)
    {
        Inline headerContent = column.TableHeader != null
            ? CreateTextTemplateItem(column.TableHeader, modelProcessor, model)
            : null;
        headerRow.Cells.Add(MakeCell(headerContent, column.InTableColumnSpan));
    }
    rowGroup.Rows.Add(headerRow);

    // Data rows: each item context produces one row with a cell per column.
    foreach (var context in rowContexts)
    {
        var dataRow = new TableRow();
        foreach (var column in tableTemplate.Children)
        {
            var content = CreateTextTemplateItem(column, modelProcessor, context);
            dataRow.Cells.Add(MakeCell(content, column.InTableColumnSpan));
        }
        rowGroup.Rows.Add(dataRow);
    }

    table.RowGroups.Add(rowGroup);
    return table;
}
/// <summary>
/// Renders a template into a FlowDocument by converting each template item
/// (text, table, line, image) into a block inside a single section.
/// </summary>
/// <param name="modelProcessor">Data-model processor used for binding resolution.</param>
/// <param name="tpl">The template to render.</param>
/// <param name="model">The data model bound into the template.</param>
/// <returns>The rendered FlowDocument, or null when the template has no items.</returns>
public FlowDocument CreateDocument(IModelProcessor modelProcessor, Template tpl, object model)
{
    if (tpl.Items == null || tpl.Items.Count == 0)
    {
        return null;
    }

    var section = new Section();
    foreach (var item in tpl.Items)
    {
        // Each case produces exactly one block; unknown item types are ignored.
        Block block = null;
        switch (item.ItemType)
        {
            case TemplateItemType.Text:
                var textParagraph = new Paragraph();
                var inline = CreateTextTemplateItem(item, modelProcessor, model);
                if (inline != null)
                {
                    textParagraph.Inlines.Add(inline);
                }
                block = textParagraph;
                break;

            case TemplateItemType.Table:
                block = CreateTableTemplateItem(item, modelProcessor, model);
                break;

            case TemplateItemType.Line:
                block = CreateLineTemplateItem(item);
                break;

            case TemplateItemType.Image:
                var imageParagraph = new Paragraph();
                var image = CreateImageTemplateItem(item);
                if (image != null)
                {
                    imageParagraph.Inlines.Add(image);
                }
                block = imageParagraph;
                break;
        }

        if (block != null)
        {
            section.Blocks.Add(block);
        }
    }

    FlowDocument document = CreateDoc(tpl);
    document.Blocks.Add(section);
    return document;
}
/// <summary>
/// Reads the vertex buffers for a model from a stream and passes them to an IModelProcessor.
/// </summary>
/// <param name="reader">The stream to read the vertex buffers from.</param>
/// <param name="model">The model's metadata.</param>
/// <param name="sectionsToRead">
/// A BitArray controlling which sections to read. Indices set to true will be read.
/// </param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">The IModelProcessor to pass the read model data to.</param>
private static void ReadVertexBuffers(IReader reader, IRenderModel model, BitArray sectionsToRead,
    EngineDescription buildInfo, IModelProcessor processor)
{
    int sectionCount = model.Sections.Length;
    for (int index = 0; index < sectionCount; index++)
    {
        // Passing null tells ReadSectionVertices to skip over the buffer so the
        // stream still advances past unwanted sections.
        IModelProcessor target = sectionsToRead[index] ? processor : null;
        ReadSectionVertices(reader, model.Sections[index], model.BoundingBoxes[0], buildInfo, target);
    }
}
/// <summary>
/// Reads the resource data for a model from a stream and passes it to an IModelProcessor.
/// </summary>
/// <param name="reader">The stream to read the model data from.</param>
/// <param name="model">The model's metadata.</param>
/// <param name="sectionsToRead">A BitArray controlling which sections to read. Indices which are set to true will be read.</param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">The IModelProcessor to pass the read model data to.</param>
public static void ReadModelData(IReader reader, IRenderModel model, BitArray sectionsToRead, BuildInformation buildInfo, IModelProcessor processor)
{
    // The begin/end calls bracket the buffer reads so the processor can set up
    // and tear down per-model state. Vertex buffers are always read before
    // index buffers — the order matches the on-disk layout.
    processor.BeginModel(model);
    ReadVertexBuffers(reader, model, sectionsToRead, buildInfo, processor);
    ReadIndexBuffers(reader, model, sectionsToRead, buildInfo, processor);
    processor.EndModel(model);
}
/// <summary>
/// Reads the vertex buffers for a model from a stream and passes them to an IModelProcessor.
/// </summary>
/// <param name="reader">The stream to read the vertex buffers from.</param>
/// <param name="model">The model's metadata.</param>
/// <param name="sectionsToRead">
/// A BitArray controlling which sections to read. Indices set to true will be read.
/// </param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">The IModelProcessor to pass the read model data to.</param>
private static void ReadVertexBuffers(IReader reader, IRenderModel model, BitArray sectionsToRead,
    EngineDescription buildInfo, IModelProcessor processor)
{
    for (int sectionIndex = 0; sectionIndex < model.Sections.Length; sectionIndex++)
    {
        // Sections not flagged in sectionsToRead are skipped (null processor),
        // which still consumes their bytes from the stream.
        ReadSectionVertices(
            reader,
            model.Sections[sectionIndex],
            model.BoundingBoxes[0],
            buildInfo,
            sectionsToRead[sectionIndex] ? processor : null);
    }
}
/// <summary>
/// Reads the resource data for a model from a stream and passes it to an IModelProcessor.
/// </summary>
/// <param name="reader">The stream to read the model data from.</param>
/// <param name="model">The model's metadata.</param>
/// <param name="sectionsToRead">
/// A BitArray controlling which sections to read. Indices which are set to true will be read.
/// </param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">The IModelProcessor to pass the read model data to.</param>
public static void ReadModelData(IReader reader, IRenderModel model, BitArray sectionsToRead, EngineDescription buildInfo, IModelProcessor processor)
{
    // Bracket the reads with begin/end so the processor can manage per-model
    // state; vertex buffers precede index buffers, matching the stream layout.
    processor.BeginModel(model);
    ReadVertexBuffers(reader, model, sectionsToRead, buildInfo, processor);
    ReadIndexBuffers(reader, model, sectionsToRead, buildInfo, processor);
    processor.EndModel(model);
}
/// <summary>
/// Stores the request context for this control and, when showing a request
/// view, removes the selection column from the grid.
/// </summary>
/// <param name="networkSetting">Network settings for the current connection.</param>
/// <param name="processor">The model processor handling the request.</param>
/// <param name="requestID">Identifier of the request being displayed.</param>
/// <param name="isRequestView">True when the control is displaying a request view.</param>
public void Initialize(NetWorkSetting networkSetting, IModelProcessor processor, string requestID, bool isRequestView)
{
    this.isRequestView = isRequestView;
    this.requestId = requestID;
    this.processor = processor;
    this.networkSetting = networkSetting;

    if (!isRequestView)
    {
        return;
    }

    RemoveSelectColumn();
}
/// <summary>
/// Reads the index buffers for a model from a stream and passes them to an IModelProcessor.
/// </summary>
/// <param name="reader">The stream to read the index buffers from.</param>
/// <param name="model">The model's metadata.</param>
/// <param name="sectionsToRead">A BitArray controlling which sections to read. Indices set to true will be read.</param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">The IModelProcessor to pass the read model data to.</param>
private static void ReadIndexBuffers(IReader reader, IRenderModel model, BitArray sectionsToRead,
    BuildInformation buildInfo, IModelProcessor processor)
{
    int baseIndex = 0;
    for (int sectionIndex = 0; sectionIndex < model.Sections.Length; sectionIndex++)
    {
        IModelSection section = model.Sections[sectionIndex];
        bool wanted = sectionsToRead[sectionIndex];

        // Unwanted sections are skipped via a null processor but still consumed
        // from the stream; only consumed sections advance the running base index.
        ReadSectionIndices(reader, section, baseIndex, buildInfo, wanted ? processor : null);
        if (wanted)
        {
            baseIndex += CountVertices(section);
        }
    }
}
// Test is intentionally disabled; re-enable the attribute to run it.
//[TestMethod]
public void Request()
{
    try
    {
        // Posting the request must echo back the expected request token.
        string token;
        var processor = PostRequest(out token);
        Assert.AreEqual(REQUEST_ID, token);
        processor.Close("1");
    }
    catch (Exception ex)
    {
        Assert.Fail(ex.Message, ex);
    }
}
/// <summary>
/// Posts a hub request to the model processor: registers the request and its
/// available documents, streams down the documents the processor asks for,
/// then starts processing.
/// </summary>
/// <param name="request">The hub request with source metadata and available documents.</param>
/// <param name="processor">The model processor that will execute the request.</param>
/// <param name="networkSetting">Network settings used for interface metadata and document streams.</param>
/// <returns>The request identifier (the request source ID as a string).</returns>
string ProcessRequest(HubRequest request, IModelProcessor processor, NetWorkSetting networkSetting)
{
    try
    {
        // Project the hub's document list into the model API's Document type.
        Document[] requestDocuments = request.Documents.Select(d => new Lpp.Dns.DataMart.Model.Document(d.ID.ToString("D"), d.Document.MimeType, d.Document.Name)
        {
            IsViewable = d.Document.IsViewable,
            Size = Convert.ToInt32(d.Document.Size),
            Kind = d.Document.Kind
        }).ToArray();
        Document[] desiredDocuments;
        string requestId = request.Source.ID.ToString();
        IDictionary<string, string> requestProperties;

        // Let the processor inspect the available documents and report which it wants.
        processor.Request(requestId, networkSetting.CreateInterfaceMetadata(), request.CreateInterfaceMetadata(), requestDocuments, out requestProperties, out desiredDocuments);
        _log.Info("Request posted: " + request.Source.Identifier + " (ID: " + requestId + ")");
        _log.Info("Number of documents available: " + requestDocuments.Length);
        _log.Info("Number of documents desired: " + desiredDocuments.Length);

        if (requestProperties != null && requestProperties.Count > 0)
        {
            _log.Info("Properties: ");
            foreach (string key in requestProperties.Keys)
            {
                _log.Info("Key: " + key + "=" + requestProperties[key]);
            }
        }

        // TODO[ddee] Needs to update the requestProperties here, but do not have a proper status.
        // Temporarily using InProgress.
        // BMS: Don't report inprogress status until portal is fixed to display status in routings
        // DnsServiceManager.SetRequestStatus(request, DnsServiceManager.ConvertModelRequestStatus(processor.Status(requestId)), requestProperties, networkSetting);

        // Stream each desired document down to the processor before starting.
        foreach (Lpp.Dns.DataMart.Model.Document requestDocument in desiredDocuments)
        {
            _log.Info("About to post desired document id: " + requestDocument.DocumentID);
            DocumentChunkStream requestDocumentStream = new DocumentChunkStream(Guid.Parse(requestDocument.DocumentID), networkSetting);
            processor.RequestDocument(requestId, requestDocument.DocumentID, requestDocumentStream);
            _log.Info("Posted desired document id: " + requestDocument.DocumentID);
        }

        _log.Info("Starting request with local request: " + request.Source.Identifier + " (ID: " + requestId + ")");
        processor.Start(requestId);
        _log.Info("Start finished on request with local request: " + request.Source.Identifier + " (ID: " + requestId + ")");
        return (requestId);
    }
    catch (Exception ex)
    {
        _log.Error("Unexpected exception in Util.ProcessRequest.", ex);
        throw;
    }
}
/// <summary>
/// Reads the index buffers for a model from a stream and passes them to an IModelProcessor.
/// </summary>
/// <param name="reader">The stream to read the index buffers from.</param>
/// <param name="model">The model's metadata.</param>
/// <param name="sectionsToRead">
/// A BitArray controlling which sections to read. Indices set to true will be read.
/// </param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">The IModelProcessor to pass the read model data to.</param>
private static void ReadIndexBuffers(IReader reader, IRenderModel model, BitArray sectionsToRead,
    EngineDescription buildInfo, IModelProcessor processor)
{
    int runningBaseIndex = 0;
    for (int index = 0; index < model.Sections.Length; index++)
    {
        IModelSection currentSection = model.Sections[index];

        // A null target makes ReadSectionIndices skip the buffer while still
        // consuming its bytes from the stream.
        IModelProcessor target = sectionsToRead[index] ? processor : null;
        ReadSectionIndices(reader, currentSection, runningBaseIndex, buildInfo, target);

        // Only sections that were actually read contribute to the base index.
        if (sectionsToRead[index])
        {
            runningBaseIndex += CountVertices(currentSection);
        }
    }
}
/// <summary>
/// Reads the index buffer for a section in a model.
/// </summary>
/// <param name="reader">The stream to read the index buffer from.</param>
/// <param name="section">The model section that the index buffer belongs to.</param>
/// <param name="baseIndex">Base vertex index to offset the submesh indices by.</param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">The IModelProcessor to pass the read model data to, or null if the index buffer should be skipped over.</param>
private static void ReadSectionIndices(IReader reader, IModelSection section, int baseIndex,
    BuildInformation buildInfo, IModelProcessor processor)
{
    bool consume = processor != null;
    foreach (IModelSubmesh submesh in section.Submeshes)
    {
        if (!consume)
        {
            // Skipped sections still advance the stream past the buffer.
            IndexBufferReader.SkipIndexBuffer(reader, submesh.IndexBufferCount);
            continue;
        }
        ushort[] indices = IndexBufferReader.ReadIndexBuffer(reader, submesh.IndexBufferCount);
        processor.ProcessSubmeshIndices(submesh, indices, baseIndex);
    }

    // Round the stream position up to the next 4-byte boundary.
    reader.SeekTo((reader.Position + 3) & ~3);
}
/// <summary>
/// Registers a processor for the given model/data mart combination, creating
/// the per-model dictionary on first use.
/// </summary>
/// <param name="modelId">Identifier of the model the processor belongs to.</param>
/// <param name="dataMartId">Identifier of the data mart the processor serves.</param>
/// <param name="processor">The processor instance to register.</param>
/// <exception cref="ArgumentException">
/// Thrown if a processor is already registered for this model/data mart pair
/// (unchanged from the original behavior of Dictionary.Add).
/// </exception>
public void Add(Guid modelId, Guid dataMartId, IModelProcessor processor)
{
    // TryGetValue performs a single lookup instead of the original
    // ContainsKey-then-indexer double lookup.
    IDictionary<Guid, IModelProcessor> dataMartProcessors;
    if (!modelDataMartProcessors.TryGetValue(modelId, out dataMartProcessors))
    {
        dataMartProcessors = new Dictionary<Guid, IModelProcessor>();
        modelDataMartProcessors.Add(modelId, dataMartProcessors);
    }
    dataMartProcessors.Add(dataMartId, processor);
}
/// <summary>
/// Reads the index buffer for a section in a model.
/// </summary>
/// <param name="reader">The stream to read the index buffer from.</param>
/// <param name="section">The model section that the index buffer belongs to.</param>
/// <param name="baseIndex">Base vertex index to offset the submesh indices by.</param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">
/// The IModelProcessor to pass the read model data to, or null if the index
/// buffer should be skipped over.
/// </param>
private static void ReadSectionIndices(IReader reader, IModelSection section, int baseIndex,
    EngineDescription buildInfo, IModelProcessor processor)
{
    foreach (IModelSubmesh submesh in section.Submeshes)
    {
        if (processor == null)
        {
            // Not requested — advance past the buffer without decoding it.
            IndexBufferReader.SkipIndexBuffer(reader, submesh.IndexBufferCount);
        }
        else
        {
            ushort[] indexData = IndexBufferReader.ReadIndexBuffer(reader, submesh.IndexBufferCount);
            processor.ProcessSubmeshIndices(submesh, indexData, baseIndex);
        }
    }
    reader.SeekTo((reader.Position + 3) & ~3); // Align 4
}
/// <summary>
/// Posts a hub request to the model processor: registers the request and its
/// available documents, downloads the documents the processor asks for, then
/// starts processing.
/// </summary>
/// <param name="request">The hub request with source metadata and available documents.</param>
/// <param name="processor">The model processor that will execute the request.</param>
/// <returns>The request identifier (the request source ID as a string).</returns>
private string ProcessRequest(HubRequest request, IModelProcessor processor)
{
    try
    {
        // Project the hub's document list into the model API's Document type.
        Document[] requestDocuments = request.Documents.Select(d => new Lpp.Dns.DataMart.Model.Document(d.ID.ToString("D"), d.Document.MimeType, d.Document.Name)
        {
            IsViewable = d.Document.IsViewable,
            Size = Convert.ToInt32(d.Document.Size),
            Kind = d.Document.Kind
        }).ToArray();
        Document[] desiredDocuments;
        string requestId = request.Source.ID.ToString();
        IDictionary<string, string> requestProperties;

        // Let the processor inspect the available documents and report which it wants.
        processor.Request(requestId, _networkSetting.CreateInterfaceMetadata(), request.CreateInterfaceMetadata(), requestDocuments, out requestProperties, out desiredDocuments);
        Log.Info("Request posted: " + request.Source.Identifier + " (ID: " + requestId + ")");
        Log.Info("Number of documents available: " + requestDocuments.Length);
        Log.Info("Number of documents desired: " + desiredDocuments.Length);

        if (requestProperties != null && requestProperties.Count > 0)
        {
            Log.Info("Properties: ");
            foreach (string key in requestProperties.Keys)
            {
                Log.Info("Key: " + key + "=" + requestProperties[key]);
            }
        }

        // Download each desired document and hand its stream to the processor.
        foreach (Lpp.Dns.DataMart.Model.Document requestDocument in desiredDocuments)
        {
            Log.Debug("Downloading document" + requestDocument.Filename + $" for Request: {request.Source.MSRequestID}, DataMart: { request.DataMartName }");
            DocumentChunkStream requestDocumentStream = new DocumentChunkStream(Guid.Parse(requestDocument.DocumentID), _networkSetting);
            processor.RequestDocument(requestId, requestDocument.DocumentID, requestDocumentStream);
            Log.Debug("Successfully Downloaded document" + requestDocument.Filename + $" for Request: {request.Source.MSRequestID}, DataMart: { request.DataMartName }");
        }

        Log.Info("Starting request with local request: " + request.Source.Identifier + " (ID: " + requestId + ")");
        processor.Start(requestId);
        Log.Info("Start finished on request with local request: " + request.Source.Identifier + " (ID: " + requestId + ")");
        return (requestId);
    }
    catch (Exception ex)
    {
        Log.Error("Unexpected exception in Util.ProcessRequest.", ex);
        throw;
    }
}
/// <summary>
/// Reads the vertex buffer for a section in a model.
/// </summary>
/// <param name="reader">The stream to read the vertex buffer from.</param>
/// <param name="section">The model section that the vertex buffer belongs to.</param>
/// <param name="boundingBox">The bounding box for the model section.</param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">
/// The IModelProcessor to pass the read model data to, or null if the vertex
/// buffer should be skipped over.
/// </param>
private static void ReadSectionVertices(IReader reader, IModelSection section, BoundingBox boundingBox,
    EngineDescription buildInfo, IModelProcessor processor)
{
    VertexLayout layout = buildInfo.VertexLayouts.GetLayout(section.VertexFormat);
    bool notify = processor != null;
    foreach (IModelSubmesh submesh in section.Submeshes)
    {
        if (notify)
        {
            processor.BeginSubmeshVertices(submesh);
        }
        // ReadVertices also receives the (possibly null) processor — per the
        // contract above, a null processor means the data is skipped over.
        VertexBufferReader.ReadVertices(reader, layout, submesh.VertexBufferCount, boundingBox, processor);
        if (notify)
        {
            processor.EndSubmeshVertices(submesh);
        }
    }
    VertexBufferReader.SkipExtraElements(reader, section);
}
/// <summary>
/// Creates entities from the supplied parameters and returns the created
/// result serialized as JSON in the response text.
/// </summary>
/// <param name="param">Parameters describing the entities to create.</param>
/// <returns>Response whose result flag indicates success.</returns>
public ApiResponse Create(List<ModelParam> param)
{
    ModelProcessor = new ModelProcessor();
    Response = new ApiResponse();
    try
    {
        var created = ModelProcessor.Create(param);
        Response.text = JsonConverter.JsonConverter.ObjToJson(created);
        Response.result = true;
    }
    catch
    {
        // All failures are reported uniformly; no exception detail is exposed.
        Response.text = "Something went wrong :(";
        Response.result = false;
    }
    return Response;
}
/// <summary>
/// Returns all entities serialized as JSON in the response text.
/// </summary>
/// <returns>Response whose result flag indicates success.</returns>
public ApiResponse ListAll()
{
    ModelProcessor = new ModelProcessor();
    Response = new ApiResponse();
    try
    {
        // Fix: Find() was previously invoked twice with the first result
        // discarded; a single call avoids the duplicate query.
        Response.text = JsonConverter.JsonConverter.ObjToJson(ModelProcessor.Find());
        Response.result = true;
        return (Response);
    }
    catch
    {
        // All failures are reported uniformly; no exception detail is exposed.
        Response.text = "Unfortunately something went wrong :(";
        Response.result = false;
        return (Response);
    }
}
/// <summary>
/// Deletes the entity with the given id and reports the outcome.
/// </summary>
/// <param name="id">Primary key of the entity to delete.</param>
/// <returns>Response whose result flag indicates success.</returns>
public ApiResponse DeleteById(long id)
{
    ModelProcessor = new ModelProcessor();
    Response = new ApiResponse();
    try
    {
        ModelProcessor.Delete(id);
        Response.text = "Entity was successfully removed from the system.";
        Response.result = true;
    }
    catch
    {
        // All failures are reported uniformly; no exception detail is exposed.
        Response.text = "Unfortunately something went wrong :(";
        Response.result = false;
    }
    return Response;
}
/// <summary>
/// Updates the entity with the given id from the supplied parameters.
/// </summary>
/// <param name="id">Primary key of the entity to update.</param>
/// <param name="param">New values for the entity.</param>
/// <returns>Response whose result flag indicates success.</returns>
public ApiResponse Update(long id, ModelParam param)
{
    ModelProcessor = new ModelProcessor();
    Response = new ApiResponse();
    try
    {
        ModelProcessor.Update(id, param);
        Response.text = "Entity was successfully updated";
        Response.result = true;
    }
    catch
    {
        // All failures are reported uniformly; no exception detail is exposed.
        Response.text = "Unfortunately something went wrong :(";
        Response.result = false;
    }
    return Response;
}
/// <summary>
/// Deletes every entity whose id appears in the supplied list.
/// </summary>
/// <param name="idList">Primary keys of the entities to delete.</param>
/// <returns>Response whose result flag indicates success.</returns>
public ApiResponse Delete(List<long> idList)
{
    ModelProcessor = new ModelProcessor();
    Response = new ApiResponse();
    try
    {
        ModelProcessor.Delete(idList);
        Response.text = "Entity was successfully removed from the system.";
        Response.result = true;
    }
    catch
    {
        // All failures are reported uniformly; no exception detail is exposed.
        Response.text = "Unfortunately something went wrong. Try again later. :)";
        Response.result = false;
    }
    return Response;
}
/// <summary>
/// Looks up the entity with the given primary key and returns it serialized
/// as JSON in the response text.
/// </summary>
/// <param name="id">Primary key to look up.</param>
/// <returns>Response whose result flag indicates whether the entity was found.</returns>
public ApiResponse FindByPK(long id)
{
    ModelProcessor = new ModelProcessor();
    Response = new ApiResponse();
    try
    {
        // Fix: Find(id) was previously invoked twice with the first result
        // discarded; a single call avoids the duplicate lookup.
        var entity = ModelProcessor.Find(id);
        Response.text = "Account with this PK has been found" + Environment.NewLine + JsonConverter.JsonConverter.ObjToJson(entity);
        Response.result = true;
        return (Response);
    }
    catch
    {
        Response.text = "An account with this id does not exist";
        Response.result = false;
        return (Response);
    }
}
/// <summary>
/// Dispatches a model request to the processor registered for its model type.
/// </summary>
/// <param name="request">The model request to process.</param>
/// <returns>
/// The processor's response, or a response carrying the exception message
/// (and the request's document id) when processing fails.
/// </returns>
public DocsPaVO.Modelli.ModelResponse ProcessModel(DocsPaVO.Modelli.ModelRequest request)
{
    try
    {
        IModelProcessor processor = this.CreateInstance(request.ModelType);
        return processor.ProcessModel(request);
    }
    catch (Exception ex)
    {
        // Failures are converted into an error response rather than propagated.
        return new DocsPaVO.Modelli.ModelResponse
        {
            DocumentId = request.DocumentId,
            Exception = ex.Message,
        };
    }
}
/// <summary>
/// Reads the vertex buffer for a section in a model.
/// </summary>
/// <param name="reader">The stream to read the vertex buffer from.</param>
/// <param name="section">The model section that the vertex buffer belongs to.</param>
/// <param name="boundingBox">The bounding box for the model section.</param>
/// <param name="buildInfo">Information about the cache file's target engine.</param>
/// <param name="processor">
/// The IModelProcessor to pass the read model data to, or null if the vertex
/// buffer should be skipped over.
/// </param>
private static void ReadSectionVertices(IReader reader, IModelSection section, BoundingBox boundingBox,
    EngineDescription buildInfo, IModelProcessor processor)
{
    VertexLayout layout = buildInfo.VertexLayouts.GetLayout(section.VertexFormat);
    foreach (IModelSubmesh submesh in section.Submeshes)
    {
        // Begin/end notifications are only sent when a processor is attached;
        // ReadVertices itself receives the (possibly null) processor.
        processor?.BeginSubmeshVertices(submesh);
        VertexBufferReader.ReadVertices(reader, layout, submesh.VertexBufferCount, boundingBox, processor);
        processor?.EndSubmeshVertices(submesh);
    }
    VertexBufferReader.SkipExtraElements(reader, section);
}
/// <summary>
/// Updates a batch of entities from the supplied parameter list.
/// </summary>
/// <param name="param">New values for the entities to update.</param>
/// <returns>Response whose result flag indicates success.</returns>
public ApiResponse Update(List<ModelParam> param)
{
    ModelProcessor = new ModelProcessor();
    Response = new ApiResponse();
    try
    {
        ModelProcessor.Update(param);
        Response.text = "Entities were successfully updated.";
        Response.result = true;
    }
    catch
    {
        // All failures are reported uniformly; no exception detail is exposed.
        Response.text = "Unfortunately something went wrong :(";
        Response.result = false;
    }
    return Response;
}
/// <summary>
/// Builds an inline element for a text template item: resolves the bound value,
/// hosts it in a styled TextBlock and wraps that in an InlineUIContainer.
/// </summary>
/// <param name="itemTpl">Template item describing the text's binding and styling.</param>
/// <param name="modelProcessor">Resolves the binding expression against the data context.</param>
/// <param name="currentDataContext">The data object the binding is evaluated against.</param>
/// <returns>An inline container hosting the rendered text.</returns>
private Inline CreateTextTemplateItem(TemplateItem itemTpl, IModelProcessor modelProcessor, object currentDataContext)
{
    var boundText = modelProcessor.GetBindingValue(itemTpl.BindingData, currentDataContext);

    var textPanel = new TextBlock(new Run(boundText))
    {
        TextWrapping = TextWrapping.WrapWithOverflow,
        Padding = itemTpl.Padding,
        Margin = itemTpl.Margin
    };

    // Font family and size are only applied when the template specifies them.
    if (!string.IsNullOrWhiteSpace(itemTpl.FontFamily))
    {
        textPanel.FontFamily = new Typeface(itemTpl.FontFamily).FontFamily;
    }
    if (itemTpl.Size > 0)
    {
        textPanel.FontSize = itemTpl.Size;
    }
    SetControlFontStyle(itemTpl.FontStyle, textPanel);
    SetControlFontWeight(itemTpl.FontWeight, textPanel);

    return new InlineUIContainer(textPanel);
}
/// <summary>
/// Builds a settings dictionary from the model description's properties and
/// assigns it to the processor, replacing (not merging with) any settings the
/// processor previously had.
/// </summary>
/// <param name="modelDescription">Source of the property name/value pairs.</param>
/// <param name="processor">The processor whose Settings property is replaced.</param>
public static void UpdateProcessorSettings(ModelDescription modelDescription, IModelProcessor processor)
{
    Dictionary<string, object> settings = new Dictionary<string, object>();
    if (modelDescription.Properties != null && modelDescription.Properties.Count > 0)
    {
        // NOTE(review): Dictionary.Add throws on a duplicate property name —
        // presumably property names are unique; verify against the source data.
        foreach (PropertyData property in modelDescription.Properties)
        {
            settings.Add(property.Name, property.Value);
        }
        // NOTE(review): the AppFolderPath default is only injected when at
        // least one property exists; with an empty/null property list the
        // processor receives no AppFolderPath at all — confirm this is intended.
        if (!settings.ContainsKey("AppFolderPath"))
        {
            settings.Add("AppFolderPath", Lpp.Dns.DataMart.Client.Utils.Configuration.AppFolderPath);
        }
    }
    processor.Settings = settings;
}
/// <summary>
/// Creates an input processor over the named file.
/// </summary>
/// <param name="modelProcessor">Receives the processed model data.</param>
/// <param name="fileName">Name of the input file to read.</param>
/// <param name="streamReaderFactory">Factory that opens a StreamReader for a given file name.</param>
public InputProcessor(IModelProcessor modelProcessor, string fileName, Func<string, StreamReader> streamReaderFactory)
{
    _streamReaderFactory = streamReaderFactory;
    _fileName = fileName;
    _modelProcessor = modelProcessor;
}
/// <summary>
/// Form load handler: resolves the model's processor package (downloading it
/// if missing), loads the processor, then builds one editor row per processor
/// setting in the layout panel (or a placeholder label when there are none).
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event arguments (unused).</param>
private void SettingsForm_Load(object sender, EventArgs e)
{
    Lpp.Dns.DataMart.Client.DomainManger.DomainManager domainManager = new DomainManger.DomainManager(Configuration.PackagesFolderPath);
    try
    {
        if (ModelDescription == null)
        {
            return;
        }
        // Fall back to the hub model's processor when none is specified.
        if (ModelDescription.ProcessorId == Guid.Empty)
        {
            ModelDescription.ProcessorId = HubModel.ModelProcessorId;
        }

        //get the package identifier and version
        var packageIdentifier = DnsServiceManager.GetRequestTypeIdentifier(NetworkSetting, ModelDescription.ModelId, ModelDescription.ProcessorId);
        // Download the processor package if it is not already cached locally.
        if (!System.IO.File.Exists(System.IO.Path.Combine(Configuration.PackagesFolderPath, packageIdentifier.PackageName())))
        {
            DnsServiceManager.DownloadPackage(NetworkSetting, packageIdentifier);
        }

        domainManager.Load(packageIdentifier.Identifier, packageIdentifier.Version);
        modelProcessor = domainManager.GetProcessor(ModelDescription.ProcessorId);
        ProcessorManager.UpdateProcessorSettings(ModelDescription, modelProcessor);

        // Processors that opt in get initialized before the UI is built; no
        // documents are available at this point.
        if (modelProcessor is IEarlyInitializeModelProcessor)
        {
            ((IEarlyInitializeModelProcessor)modelProcessor).Initialize(ModelDescription.ModelId, Array.Empty<DocumentWithStream>());
        }

        this.SuspendLayout();
        if (modelProcessor.ModelMetadata.Settings == null || !modelProcessor.ModelMetadata.Settings.Any())
        {
            // No settings: show a single centered placeholder spanning both columns.
            var noSettingsLabel = new Label();
            noSettingsLabel.Text = "This model processor does not have any settings.";
            noSettingsLabel.Anchor = AnchorStyles.Top | AnchorStyles.Right | AnchorStyles.Bottom | AnchorStyles.Left;
            noSettingsLabel.TextAlign = ContentAlignment.MiddleCenter;
            tableLayoutPanel1.Controls.Add(noSettingsLabel, 0, 0);
            tableLayoutPanel1.SetColumnSpan(noSettingsLabel, 2);
        }
        else
        {
            _editors = new HashSet<Control>();
            tableLayoutPanel1.RowStyles.Clear();
            int rowIndex = 0;
            IEnumerable<Lpp.Dns.DataMart.Model.Settings.ProcessorSetting> settings = modelProcessor.ModelMetadata.Settings;

            // When the processor declares SQL providers, a composite data-source
            // editor handles all db-related settings; those are then filtered
            // out of the per-setting editor list below.
            if (modelProcessor.ModelMetadata.SQlProviders != null && modelProcessor.ModelMetadata.SQlProviders.Any())
            {
                DataMart.Client.Controls.DataSourceEditor dsEditor = new Controls.DataSourceEditor(modelProcessor.ModelMetadata, ModelDescription.Properties);
                dsEditor.Anchor = AnchorStyles.Top | AnchorStyles.Right | AnchorStyles.Bottom | AnchorStyles.Left;
                tableLayoutPanel1.RowStyles.Add(new RowStyle(SizeType.AutoSize));
                tableLayoutPanel1.Controls.Add(dsEditor, 0, rowIndex++);
                tableLayoutPanel1.SetColumnSpan(dsEditor, 2);
                settings = settings.Where(s => !Lpp.Dns.DataMart.Model.Settings.ProcessorSettings.IsDbSetting(s.Key)).ToArray();
                _editors.Add(dsEditor);
            }

            settings.ToList().ForEach(s =>
            {
                // Current value: stored property (case-insensitive by name),
                // falling back to the declared default for required settings.
                string value = ModelDescription.Properties.Where(p => string.Equals(p.Name, s.Key, StringComparison.OrdinalIgnoreCase)).Select(p => p.Value).FirstOrDefault();
                if (string.IsNullOrEmpty(value) && s.Required && !string.IsNullOrEmpty(s.DefaultValue))
                {
                    value = s.DefaultValue;
                }

                Label lbl = new Label();
                lbl.AutoSize = true;
                lbl.Anchor = AnchorStyles.Right;
                lbl.TextAlign = ContentAlignment.MiddleRight;
                lbl.Text = s.Title.EndsWith(":") ? s.Title : s.Title + ":";

                Control editor = null;
                if (s.ValidValues != null)
                {
                    // Enumerated setting: read-only combo box of the valid values.
                    ComboBox combo = new ComboBox();
                    combo.DropDownStyle = ComboBoxStyle.DropDownList;
                    combo.Anchor = AnchorStyles.Right | AnchorStyles.Left;
                    combo.Name = s.Key;
                    combo.Items.AddRange(s.ValidValues.Select(v => new PropertyData(v.Key, v.Value.ToString())).ToArray());
                    if (!string.IsNullOrEmpty(value))
                    {
                        foreach (PropertyData p in combo.Items)
                        {
                            if (string.Equals(p.Value, value, StringComparison.OrdinalIgnoreCase))
                            {
                                combo.SelectedItem = p;
                                break;
                            }
                        }
                    }
                    // Default to the first entry when nothing matched.
                    if (combo.SelectedIndex < 0)
                    {
                        combo.SelectedIndex = 0;
                    }
                    editor = combo;
                }
                else
                {
                    if (s.ValueType == typeof(bool) || s.ValueType == typeof(Nullable<bool>))
                    {
                        // Boolean setting: checkbox carries its own caption, so
                        // the separate label is suppressed (lbl = null).
                        CheckBox chkbox = new CheckBox();
                        chkbox.Anchor = AnchorStyles.Left;
                        chkbox.Text = s.Title;
                        chkbox.TextAlign = ContentAlignment.MiddleLeft;
                        chkbox.AutoSize = true;
                        if (!string.IsNullOrEmpty(value))
                        {
                            bool isChecked = false;
                            bool.TryParse(value, out isChecked);
                            chkbox.Checked = isChecked;
                        }
                        editor = chkbox;
                        lbl = null;
                    }
                    else if (s.ValueType == typeof(Lpp.Dns.DataMart.Model.Settings.FilePickerEditor))
                    {
                        // File setting: multi-select stores a comma-delimited list.
                        SelectFileButton btn = new SelectFileButton(s.EditorSettings as Lpp.Dns.DataMart.Model.Settings.FilePickerEditor);
                        if (btn.Multiselect)
                        {
                            btn.FileNames = ((value ?? "").Trim(',')).Split(',');
                        }
                        else
                        {
                            btn.FileName = value;
                        }
                        btn.Anchor = AnchorStyles.Right | AnchorStyles.Left;
                        editor = btn;
                    }
                    else if (s.ValueType == typeof(Lpp.Dns.DataMart.Model.Settings.FolderSelectorEditor))
                    {
                        SelectFolderButton btn = new SelectFolderButton(s.EditorSettings as Lpp.Dns.DataMart.Model.Settings.FolderSelectorEditor);
                        btn.FolderPath = value;
                        btn.Anchor = AnchorStyles.Right | AnchorStyles.Left;
                        editor = btn;
                    }
                    else
                    {
                        // Everything else is edited as free text.
                        TextBox txtbox = new TextBox();
                        txtbox.Anchor = AnchorStyles.Right | AnchorStyles.Left;
                        txtbox.Text = value;
                        editor = txtbox;
                    }
                }

                if (editor != null)
                {
                    // Tag carries the setting key so values can be read back on save.
                    editor.Tag = s.Key;
                    _editors.Add(editor);
                    tableLayoutPanel1.RowStyles.Add(new RowStyle(SizeType.AutoSize));
                    if (lbl != null)
                    {
                        tableLayoutPanel1.Controls.Add(lbl, 0, rowIndex);
                        tableLayoutPanel1.Controls.Add(editor, 1, rowIndex++);
                    }
                    else
                    {
                        // Label-less editors (checkboxes) span both columns.
                        tableLayoutPanel1.Controls.Add(editor, 0, rowIndex++);
                        tableLayoutPanel1.SetColumnSpan(editor, 2);
                    }
                }
            });

            //add auto expanding row to bottom to fill empty space
            Label emptyLabel = new Label();
            emptyLabel.Anchor = AnchorStyles.Top | AnchorStyles.Right | AnchorStyles.Bottom | AnchorStyles.Left;
            emptyLabel.Text = string.Empty;
            tableLayoutPanel1.RowStyles.Add(new RowStyle(SizeType.Percent, 100f));
            tableLayoutPanel1.Controls.Add(emptyLabel, 0, rowIndex);
            tableLayoutPanel1.SetColumnSpan(emptyLabel, 2);
        }
        this.ResumeLayout(true);
    }
    catch (Exception ex)
    {
        // Any failure aborts the dialog entirely after logging.
        log.Error(ex);
        Close();
    }
    finally
    {
        domainManager.Dispose();
        this.Cursor = Cursors.Default;
    }
}
/// <summary>
/// Background worker loop for unattended request processing: periodically
/// queries every connected network for submitted requests on data marts that
/// allow unattended operation, runs each request through its model processor,
/// and optionally uploads the results automatically.
/// </summary>
/// <param name="e">Worker event arguments (unused; the loop never exits normally).</param>
protected override void OnDoWork(DoWorkEventArgs e)
{
    _log.Info("Automated processing worker started.");
    while (true)
    {
        _log.Info("Checking Master Request Queue for requests requiring automated processing.");
        SystemTray.UpdateNotificationIcon(IconType.IconBusy, "Processing Requests");

        // Rx query: for every healthy network, collect the data marts eligible
        // for unattended work, pull a batch of Submitted requests for them,
        // and pair each loaded request with its network settings.
        var reqs = from ns in Configuration.Instance.NetworkSettingCollection.NetWorkSettings.Cast<NetWorkSetting>().ToObservable()
                   where ns.NetworkStatus == Util.ConnectionOKStatus
                   let dmIds = ns.DataMartList
                       // BMS: Note the ProcessAndNotUpload feature has been temporarilty disabled until we fix the processing around this feature
                       .Where(dm => dm.AllowUnattendedOperation && (dm.NotifyOfNewQueries || /* dm.ProcessQueriesAndNotUpload || */ dm.ProcessQueriesAndUploadAutomatically))
                       .Select(dm => dm.DataMartId)
                       .ToArray()
                   where dmIds.Any()
                   from list in DnsServiceManager.GetRequestList(ns, 0, Properties.Settings.Default.AutoProcessingBatchSize, new RequestFilter { Statuses = new [] { Lpp.Dns.DTO.DataMartClient.Enums.DMCRoutingStatus.Submitted }, DataMartIds = dmIds }, null, null)
                   from rl in list.Segment.EmptyIfNull().ToObservable()
                   where rl.AllowUnattendedProcessing
                   from r in RequestCache.ForNetwork(ns).LoadRequest(rl.ID, rl.DataMartID)
                   select new { Request = r, NetworkSetting = ns };

        reqs
        .Do(r =>
        {
            var request = r.Request;
            var datamartDescription = Configuration.Instance.GetDataMartDescription(request.NetworkId, request.DataMartId);
            var modelDescription = Configuration.Instance.GetModelDescription(request.NetworkId, request.DataMartId, request.Source.ModelID);

            // Make sure the adapter package for this request type is cached locally.
            var packageIdentifier = new Lpp.Dns.DTO.DataMartClient.RequestTypeIdentifier { Identifier = request.Source.RequestTypePackageIdentifier, Version = request.Source.AdapterPackageVersion };
            if (!System.IO.File.Exists(System.IO.Path.Combine(Configuration.PackagesFolderPath, packageIdentifier.PackageName())))
            {
                DnsServiceManager.DownloadPackage(r.NetworkSetting, packageIdentifier);
            }

            // Each request gets its own (disposed) app domain hosting the processor.
            using (var domainManager = new DomainManger.DomainManager(Configuration.PackagesFolderPath))
            {
                domainManager.Load(request.Source.RequestTypePackageIdentifier, request.Source.AdapterPackageVersion);
                IModelProcessor processor = domainManager.GetProcessor(modelDescription.ProcessorId);
                ProcessorManager.UpdateProcessorSettings(modelDescription, processor);

                // Processors that opt in get the request's documents (as lazy
                // chunked streams) before any other call.
                if (processor is IEarlyInitializeModelProcessor)
                {
                    ((IEarlyInitializeModelProcessor)processor).Initialize(modelDescription.ModelId, request.Documents.Select(d => new DocumentWithStream(d.ID, new Document(d.ID, d.Document.MimeType, d.Document.Name, d.Document.IsViewable, Convert.ToInt32(d.Document.Size), d.Document.Kind), new DocumentChunkStream(d.ID, r.NetworkSetting))).ToArray());
                }

                // Respect an explicit CanRunAndUpload=false capability.
                if (processor != null && processor.ModelMetadata != null && processor.ModelMetadata.Capabilities != null && processor.ModelMetadata.Capabilities.ContainsKey("CanRunAndUpload") && !(bool)processor.ModelMetadata.Capabilities["CanRunAndUpload"])
                {
                    //can't be run, don't attempt autoprocessing
                    return;
                }

                request.Processor = processor;
                if (request.RoutingStatus == Lpp.Dns.DTO.DataMartClient.Enums.DMCRoutingStatus.Submitted)
                {
                    if (processor != null)
                    {
                        SystemTray.generate_notification(request, request.NetworkId);
                        if (datamartDescription.NotifyOfNewQueries)
                        {
                            // Notify-only data marts: surface the request, don't run it.
                            SystemTray.UpdateNotificationIcon(IconType.IconBusy, string.Format("Query Submitted by {0}", request.Source.Author.Username));
                        }
                        else
                        {
                            processor.SetRequestProperties(request.Source.ID.ToString(), request.Properties);
                            ProcessRequest(request);
                            var statusCode = request.Processor.Status(request.Source.ID.ToString()).Code;
                            if (datamartDescription.ProcessQueriesAndUploadAutomatically && (statusCode == RequestStatus.StatusCode.Complete || statusCode == RequestStatus.StatusCode.CompleteWithMessage))
                            {
                                // Post process requests that are automatically uploaded
                                processor.PostProcess(request.Source.ID.ToString());
                                // Increment counter
                                _queriesProcessedCount++;
                                SystemTray.update_notify_text(_queriesProcessedCount, request.DataMartName, request.NetworkId);
                                UploadRequest(request);
                            }
                        }
                    }
                }
                else if (request.RoutingStatus == Lpp.Dns.DTO.DataMartClient.Enums.DMCRoutingStatus.AwaitingResponseApproval)
                {
                    // Already-processed requests awaiting approval only need upload.
                    if (datamartDescription.ProcessQueriesAndUploadAutomatically)
                    {
                        // Increment counter
                        _queriesProcessedCount++;
                        SystemTray.update_notify_text(_queriesProcessedCount, request.DataMartName, request.NetworkId);
                        UploadRequest(request);
                    }
                }
            }
        })
        // Log per-request failures, swallow them, and drain the sequence so the
        // whole batch runs to completion before sleeping.
        .LogExceptions(_log.Error)
        .Catch()
        .LastOrDefault();

        SystemTray.UpdateNotificationIcon(IconType.IconDefault, null);
        Thread.Sleep(DMClient.Properties.Settings.Default.RefreshRate);
    }
}
/// <summary>
/// Queues unattended processing of a request on a background task: runs the model processor,
/// optionally snapshots the response documents into the local cache, and — when the DataMart is
/// configured for automatic upload — post-processes, uploads, and reports the final status to
/// the network. The supplied <paramref name="domainManager"/> is passed to RunTask along with
/// the work/continuation delegates (presumably RunTask disposes it — TODO confirm).
/// </summary>
/// <param name="request">The hub request to process; request.Processor is read by the continuation.</param>
/// <param name="processor">The model processor that executes the request.</param>
/// <param name="datamartDescription">DataMart settings that select hold-locally vs. auto-upload behavior.</param>
/// <param name="domainManager">App-domain host for the processor package; handed to RunTask.</param>
/// <param name="cache">Response-document cache; only written when cache.Enabled is true.</param>
private void StartProcessingRequest(HubRequest request, IModelProcessor processor, DataMartDescription datamartDescription, DomainManger.DomainManager domainManager, Lib.Caching.DocumentCacheManager cache)
{
    // Work item: push the request properties into the processor, then execute it.
    Action process = () => {
        processor.SetRequestProperties(request.Source.ID.ToString(), request.Properties);
        ProcessRequest(request);
    };

    // Continuation: runs after processing completes; inspects the processor status and performs
    // caching / upload / status reporting depending on the DataMart configuration.
    Action <Task> continuation = (completed) => {
        HubRequestStatus hubRequestStatus = null;
        var statusCode = request.Processor.Status(request.Source.ID.ToString()).Code;

        if (cache.Enabled)
        {
            // Snapshot the processor's response documents into the local cache.
            // NOTE(review): the streams from ResponseDocument are handed to the cache —
            // presumably the cache disposes them; confirm.
            Document[] responseDocuments = processor.Response(request.Source.ID.ToString());
            cache.Add(responseDocuments.Select(doc => {
                System.IO.Stream data;
                processor.ResponseDocument(request.Source.ID.ToString(), doc.DocumentID, out data, doc.Size);
                Guid documentID;
                if (!Guid.TryParse(doc.DocumentID, out documentID))
                {
                    // Processor-issued document IDs that are not GUIDs are replaced with a generated one.
                    documentID = Utilities.DatabaseEx.NewGuid();
                    doc.DocumentID = documentID.ToString();
                }
                return(new DocumentWithStream(documentID, doc, data));
            }));
        }

        if (datamartDescription.ProcessQueriesAndNotUpload && (statusCode == RequestStatus.StatusCode.Complete || statusCode == RequestStatus.StatusCode.CompleteWithMessage))
        {
            // Configured to hold results locally: just mark the request as awaiting a manual upload.
            RequestStatuses.TryAdd(MakeKey(request), ProcessingStatus.PendingUpload);
        }
        else if (datamartDescription.ProcessQueriesAndUploadAutomatically && (statusCode == RequestStatus.StatusCode.Complete || statusCode == RequestStatus.StatusCode.CompleteWithMessage))
        {
            // Post process requests that are automatically uploaded
            processor.PostProcess(request.Source.ID.ToString());

            if (cache.Enabled)
            {
                // PostProcess may have altered the response documents: rebuild the cache from scratch.
                cache.ClearCache();
                Document[] responseDocuments = processor.Response(request.Source.ID.ToString());
                cache.Add(responseDocuments.Select(doc => {
                    System.IO.Stream data;
                    processor.ResponseDocument(request.Source.ID.ToString(), doc.DocumentID, out data, doc.Size);
                    Guid documentID;
                    if (!Guid.TryParse(doc.DocumentID, out documentID))
                    {
                        documentID = Utilities.DatabaseEx.NewGuid();
                        doc.DocumentID = documentID.ToString();
                    }
                    return(new DocumentWithStream(documentID, doc, data));
                }));
            }

            // Increment counter
            System.Threading.Interlocked.Increment(ref _queriesProcessedCount);

            // Status is re-read here because PostProcess ran since the initial read.
            statusCode = request.Processor.Status(request.Source.ID.ToString()).Code;
            if (statusCode == RequestStatus.StatusCode.Error)
            {
                hubRequestStatus = DnsServiceManager.ConvertModelRequestStatus(request.Processor.Status(request.Source.ID.ToString()));
                hubRequestStatus.Message = request.Processor.Status(request.Source.ID.ToString()).Message;
            }
            else if (statusCode == RequestStatus.StatusCode.Complete || statusCode == RequestStatus.StatusCode.CompleteWithMessage)
            {
                SystemTray.UpdateNotifyText(_queriesProcessedCount, request.DataMartName, request.NetworkId);
                try
                {
                    // Upload the results, then report whatever status the processor ends up in.
                    UploadRequest(request, cache);
                    statusCode = request.Processor.Status(request.Source.ID.ToString()).Code;
                    hubRequestStatus = DnsServiceManager.ConvertModelRequestStatus(request.Processor.Status(request.Source.ID.ToString()));
                    hubRequestStatus.Message = request.Processor.Status(request.Source.ID.ToString()).Message;
                }
                catch (Exception ex)
                {
                    // Upload failed: log locally and report a Failed routing status to the hub.
                    string message = string.Format("An error occurred while attempting unattended processing of the following query {0} (ID: {1}, DataMart: {2}, Network: {3})", request.Source.Identifier, request.Source.ID, request.DataMartName, request.NetworkName);
                    Log.Error(message, ex);
                    hubRequestStatus = new HubRequestStatus(Lpp.Dns.DTO.DataMartClient.Enums.DMCRoutingStatus.Failed, message);
                }
            }
            else
            {
                // Neither error nor complete after post-processing: propagate the processor's current status as-is.
                statusCode = request.Processor.Status(request.Source.ID.ToString()).Code;
                hubRequestStatus = DnsServiceManager.ConvertModelRequestStatus(request.Processor.Status(request.Source.ID.ToString()));
                hubRequestStatus.Message = request.Processor.Status(request.Source.ID.ToString()).Message;
            }

            DnsServiceManager.SetRequestStatus(request, hubRequestStatus, request.Properties, _networkSetting);
            Log.Info(string.Format("BackgroundProcess: Finished Processing / Uploading results for query {0} (RequestID: {3}, DataMart: {1}, Network: {2})", request.Source.Identifier, request.DataMartName, request.NetworkName, request.Source.ID));

            if (statusCode == RequestStatus.StatusCode.Error || statusCode == RequestStatus.StatusCode.Complete || statusCode == RequestStatus.StatusCode.CompleteWithMessage || statusCode == RequestStatus.StatusCode.AwaitingResponseApproval)
            {
                //mark auto-processing complete in the status cache if the processing resulted in either an error or a completed status
                var key = MakeKey(request);
                if (RequestStatuses.ContainsKey(key))
                {
                    RequestStatuses[key] = ProcessingStatus.Complete;
                }
            }
        }
    };

    // Hand the work item, continuation, and the domain manager to the task runner.
    RunTask(MakeKey(request), domainManager, process, continuation);
}
/// <summary>
/// Entry point for unattended ("auto") processing of a single hub request. Depending on the
/// DataMart configuration and the request's routing status this either: only raises a tray
/// notification, starts background processing, starts uploading previously produced results,
/// or records the request as pending a manual upload.
/// </summary>
/// <param name="input">Pair of the status-cache key and the hub request to process.</param>
private void AutoProcess(KeyValuePair <string, HubRequest> input)
{
    var request = input.Value;
    var datamartDescription = Configuration.Instance.GetDataMartDescription(request.NetworkId, request.DataMartId);

    if (datamartDescription.ProcessQueriesAndUploadAutomatically == false && datamartDescription.ProcessQueriesAndNotUpload == false)
    {
        //just notify, do not process
        string message = $"New query submitted and awaiting processing in { request.ProjectName } Project: { request.Source.Name } ({request.Source.Identifier})";
        SystemTray.DisplayNewQueryNotificationToolTip(message);
        RequestStatuses.TryAdd(input.Key, ProcessingStatus.CannotRunAndUpload);
        return;
    }

    var modelDescription = Configuration.Instance.GetModelDescription(request.NetworkId, request.DataMartId, request.Source.ModelID);

    // Make sure the adapter package for this request type exists locally before loading it.
    var packageIdentifier = new Lpp.Dns.DTO.DataMartClient.RequestTypeIdentifier { Identifier = request.Source.RequestTypePackageIdentifier, Version = request.Source.AdapterPackageVersion };
    if (!System.IO.File.Exists(System.IO.Path.Combine(Configuration.PackagesFolderPath, packageIdentifier.PackageName())))
    {
        DnsServiceManager.DownloadPackage(_networkSetting, packageIdentifier);
    }

    // The domain manager hosts the processor package. It is disposed on every path in this
    // method EXCEPT when it is handed off to StartProcessingRequest / StartUploadingRequest,
    // which receive it and return early — so a try/finally must not be used here.
    var domainManager = new DomainManger.DomainManager(Configuration.PackagesFolderPath);
    try
    {
        domainManager.Load(request.Source.RequestTypePackageIdentifier, request.Source.AdapterPackageVersion);

        IModelProcessor processor = domainManager.GetProcessor(modelDescription.ProcessorId);
        ProcessorManager.UpdateProcessorSettings(modelDescription, processor);
        processor.Settings.Add("NetworkId", request.NetworkId);

        // NOTE(review): FirstOrDefault() returns null when no response exists for this DataMart,
        // which would throw a NullReferenceException here — confirm a response is always present.
        Lib.Caching.DocumentCacheManager cache = new Lib.Caching.DocumentCacheManager(request.NetworkId, request.DataMartId, request.Source.ID, request.Source.Responses.Where(x => x.DataMartID == request.DataMartId).FirstOrDefault().ResponseID);

        //need to initialize before checking the capabilities and settings of the processor since they may change based on the type of request being sent.
        if (processor is IEarlyInitializeModelProcessor)
        {
            ((IEarlyInitializeModelProcessor)processor).Initialize(modelDescription.ModelId, request.Documents.Select(d => new DocumentWithStream(d.ID, new Document(d.ID, d.Document.MimeType, d.Document.Name, d.Document.IsViewable, Convert.ToInt32(d.Document.Size), d.Document.Kind), new DocumentChunkStream(d.ID, _networkSetting))).ToArray());
        }

        // Processors that declare the "CanRunAndUpload" capability as false cannot be run unattended.
        if (processor != null && processor.ModelMetadata != null && processor.ModelMetadata.Capabilities != null && processor.ModelMetadata.Capabilities.ContainsKey("CanRunAndUpload") && !(bool)processor.ModelMetadata.Capabilities["CanRunAndUpload"])
        {
            //can't be run, don't attempt autoprocessing
            RequestStatuses.TryAdd(input.Key, ProcessingStatus.CannotRunAndUpload);
            domainManager.Dispose();
            return;
        }

        request.Processor = processor;

        if (cache.HasResponseDocuments == false && (request.RoutingStatus == Lpp.Dns.DTO.DataMartClient.Enums.DMCRoutingStatus.Submitted || request.RoutingStatus == DTO.DataMartClient.Enums.DMCRoutingStatus.Resubmitted))
        {
            // New or resubmitted request with no cached results yet: kick off background processing.
            if (processor != null)
            {
                SystemTray.GenerateNotification(request, request.NetworkId);
                StartProcessingRequest(request, processor, datamartDescription, domainManager, cache);
                return; // domainManager handed to StartProcessingRequest — not disposed here
            }
        }
        else if (request.RoutingStatus == Lpp.Dns.DTO.DataMartClient.Enums.DMCRoutingStatus.AwaitingResponseApproval)
        {
            if (datamartDescription.ProcessQueriesAndUploadAutomatically)
            {
                // Increment counter
                System.Threading.Interlocked.Increment(ref _queriesProcessedCount);
                SystemTray.UpdateNotifyText(_queriesProcessedCount, request.DataMartName, request.NetworkId);
                StartUploadingRequest(request, domainManager, cache);
                return; // domainManager handed to StartUploadingRequest — not disposed here
            }
        }
        else if (cache.HasResponseDocuments)
        {
            // Results were already produced earlier; flag the request as awaiting upload.
            RequestStatuses.TryAdd(input.Key, ProcessingStatus.PendingUpload);
        }

        domainManager.Dispose();
    }
    catch (Exception ex)
    {
        Log.Error($"Error autoprocessing Request: { request.Source.Identifier }, DataMartId: { request.DataMartId }, NetworkId: {request.NetworkId}", ex);
        domainManager.Dispose();
        throw;
    }
}