/// <summary>
/// Loads the given data file into a fresh <see cref="NodeIndexer"/>, wiring up the
/// parse/completion/error callbacks so the module receives items as soon as the
/// indexer parses them. Any previously created indexer is disposed first so its
/// resources and event subscriptions are released before a new load starts.
/// </summary>
/// <param name="dataFile">Path of the data file to index.</param>
/// <param name="schema">Schema describing the structure of the data file.</param>
public void LoadDataInternal(string dataFile, Field schema)
{
    try
    {
        logger.Info("Loading DataFile: {0}", dataFile);

        // Tear down the previous indexer before replacing it; otherwise its
        // file handles and event hooks would outlive this load.
        if (DataFactory != null)
        {
            DataFactory.Dispose();
        }

        DataFactory = new NodeIndexer();
        DataFactory.OnNodeParsed += DataFactory_NodeParsed;
        DataFactory.OnIndexingCompleted += DataFactory_NodeIndexingCompleted;
        // NOTE(review): plain assignment (not +=) replaces any existing handler —
        // confirm OnError is a delegate property and this is intentional.
        DataFactory.OnError = OnError;

        DataFactory.LoadData(dataFile, schema);
    }
    catch (Exception ex)
    {
        // Loading failures are reported to the user rather than propagated.
        logger.Error(ex);
        MessageService.ShowError("Data Loading Error", ex.Message);
    }
}
/// <summary>
/// Executes the XML input module: validates the module configuration, (re)loads
/// the configured XML data file if needed, indexes its nodes, validates them
/// against the schema, and publishes the indexer on <paramref name="workInfo"/>
/// for downstream modules.
/// </summary>
/// <param name="workInfo">Shared workflow context used for logging, progress reporting and results.</param>
/// <returns><c>true</c> when indexing and validation succeeded; otherwise <c>false</c>.</returns>
public override bool Run(WorkInfo workInfo)
{
    NodeIndexer indexer = null;
    bool bStatus = false;
    try
    {
        logger.Info("Running_workflow");
        workInfo.Log(this.DisplayName, NLog.LogLevel.Info, "Running Module");

        // Guard clauses: the module state must be fully configured before indexing.
        if (this.OutputNode.State == null)
        {
            workInfo.Log(this.DisplayName, NLog.LogLevel.Error, "XML Module OutputNode.State is null");
            return (false);
        }
        if (this.OutputNode.State.Schema == null)
        {
            workInfo.Log(this.DisplayName, NLog.LogLevel.Error, "OutputNode.State.Schema is null");
            return (false);
        }
        if (this.OutputNode.State.DataFilePath == null)
        {
            workInfo.Log(this.DisplayName, NLog.LogLevel.Error, "XML Input module OutputNode.State.DataFilePath is null");
            return (false);
        }

        // If the input file hasn't been loaded yet, or a different path is now
        // configured, (re)load it from disk.
        if (this.OutputNode.State.DataFile == null ||
            this.OutputNode.State.DataFile.FilePath != this.OutputNode.State.DataFilePath)
        {
            this.OutputNode.State.DataFile = WorkflowFileFactory.LoadFromXmlFile(this.OutputNode.State.DataFilePath);
        }

        // Fix up parent links / XPath expressions on the schema tree before use.
        Field.SetParent(this.OutputNode.State.Schema);
        Field.ComputeXPath(this.OutputNode.State.Schema);
        workInfo.Schema = this.OutputNode.State.Schema;

        bool isValid = false;
        indexer = new NodeIndexer();

        workInfo.Log(this.DisplayName, NLog.LogLevel.Info, "Starting indexing process, please wait....");
        indexer.LoadData(this.OutputNode.State.DataFilePath, this.OutputNode.State.Schema);
        workInfo.Indexer = indexer;
        workInfo.Log(this.DisplayName, NLog.LogLevel.Info, "Indexing process completed");

        workInfo.Log(this.DisplayName, NLog.LogLevel.Info, "Starting optionality validation process, please wait....");
        foreach (var nodeName in indexer.GetFirstLevelNodes())
        {
            int Count = indexer.GetNodeCount(nodeName);
            workInfo.Log(this.DisplayName, NLog.LogLevel.Info, $"[{nodeName}].Count = {Count} Nodes");
        }

        // Forward validation progress to the workflow, then detach the handler
        // afterwards so the lambda (and its captured workInfo) does not leak.
        this.OutputNode.State.DataFile.OnValidationProgressChange += (s, e) => { workInfo.UpdateProgress(this, e); };
        var validationErrors = new List<string>();
        isValid = this.OutputNode.State.DataFile.ValidateUsingSchema(
            this.OutputNode.State.Schema, indexer.GetTotalNodeCount(), out validationErrors);
        this.OutputNode.State.DataFile.OnValidationProgressChange = null;

        foreach (var message in validationErrors)
        {
            workInfo.Log(this.DisplayName, NLog.LogLevel.Error, message);
        }

        if (isValid == false)
        {
            workInfo.Log(this.DisplayName, NLog.LogLevel.Error, $"Halting workflow execution due to xml validation error");
            return (false);
        }
        if (indexer.ItemCount <= 0)
        {
            workInfo.Log(this.DisplayName, NLog.LogLevel.Error, $"Halting workflow execution as there are no xml nodes in xml file");
            return (false);
        }

        bStatus = true;
    }
    catch (Exception ex)
    {
        workInfo.Log(this.DisplayName, NLog.LogLevel.Error, $"Fatal error occured in {this.DisplayName}");
        workInfo.Log(this.DisplayName, NLog.LogLevel.Error, ex.Message);
        logger.Error(ex, "Workflow_excution_error");
        bStatus = false;
    }
    finally
    {
        // Always release the indexer's event subscriptions and file handle,
        // even when the early returns above were taken or an exception occurred.
        if (indexer != null)
        {
            indexer.ClearEvents();
            indexer.CloseFileStream();
        }
    }

    workInfo.Log(this.DisplayName, NLog.LogLevel.Info, $"Module completed processing with Status: {bStatus}");
    logger.Info($"{this.DisplayName} module completed processing with status:{bStatus}");
    return (bStatus);
}
/// <summary>
/// Greedily merges not-yet-merged sub-dendrites: repeatedly finds the closest pair of
/// edge/branch nodes between two different sub-dendrites (using a per-(cluster,node)
/// distance cache) and, while that minimum distance is within
/// <c>param.distanceThreshold</c>, links the two nodes, absorbs the slave cluster's
/// node list and volume into the master's, and removes the slave from
/// <c>NotMergedSubDendritesIdx</c>. Finally, re-roots each surviving sub-dendrite at
/// its last edge node.
/// </summary>
public void ConnectSubDendritesToOtherSubDendrites() {
    // Candidate connection points (edge + branch nodes) per sub-dendrite index.
    var nodelist = new Dictionary <int, List <DendriteNode> >();
    foreach (int index in NotMergedSubDendritesIdx) {
        List <DendriteNode> node = sub_dendrites[index].GetEdgeNodeAndBranchNodes();
        nodelist.Add(index, node);
    }
    DendriteNode masterNode = null;
    DendriteNode slaveNode = null;
    // cache[(master, master2)] remembers the nearest (cluster, node) found for that
    // master node, so the inner search can be skipped on later iterations.
    var cache = new Dictionary <Tuple2, NodeIndexer>();
    while (true) {
        // (Translated) The master-slave distances could be precomputed, stored in an
        // array and sorted for faster lookups, but there has been no demand so this is
        // deferred. Only distances <= distanceThreshold should be stored, or memory
        // would likely run out.
        // (Translated) Since the master itself may be absorbed into another node,
        // this should be managed with a union-find tree.
        int masterIndex = 0;
        int slaveIndex = 0;
        double min = Double.MaxValue;
        foreach (int master in NotMergedSubDendritesIdx) {
            for (int master2 = 0; master2 < nodelist[master].Count; master2++) {
                var dictKey = new Tuple2(master, master2);
                // Re-search when there is no cache entry, or when the cached target
                // cluster has been emptied (i.e. merged away).
                if (cache.ContainsKey(dictKey) == false || nodelist[cache[dictKey].ClusterIdx].Count == 0) {
                    foreach (int slave in NotMergedSubDendritesIdx) {
                        if (master == slave) { continue; }
                        for (int slave2 = 0; slave2 < nodelist[slave].Count; slave2++) {
                            double tmpdist = nodelist[master][master2].EuclideanDistanceTo(nodelist[slave][slave2]);
                            // No cache entry yet: add a new one.
                            if (cache.ContainsKey(dictKey) == false) {
                                cache.Add(dictKey, new NodeIndexer(tmpdist, slave, slave2));
                            }
                            // Cached node was merged into another cluster: replace it.
                            else if (nodelist[cache[dictKey].ClusterIdx].Count == 0) {
                                cache[dictKey] = new NodeIndexer(tmpdist, slave, slave2);
                            }
                            // Found a node at least as close as the cached one: update the cache.
                            else if (cache[dictKey].Norm >= tmpdist) {
                                cache[dictKey] = new NodeIndexer(tmpdist, slave, slave2);
                            }
                            if (min >= tmpdist) {
                                min = tmpdist;
                                // NOTE(review): masterNode is taken from the slave list and
                                // slaveNode from the master list — this looks inverted relative
                                // to masterIndex/slaveIndex below; confirm it is intentional.
                                masterNode = nodelist[slave][slave2];
                                slaveNode = nodelist[master][master2];
                                masterIndex = master;
                                slaveIndex = slave;
                            }
                        }
                    }
                }
                // A valid cache entry exists: use it instead of re-searching.
                else {
                    NodeIndexer nodeIndex = cache[dictKey];
                    double tmpdist = nodeIndex.Norm;
                    int slave = nodeIndex.ClusterIdx;
                    int slave2 = nodeIndex.NodeIdx;
                    if (min >= tmpdist) {
                        min = tmpdist;
                        masterNode = nodelist[slave][slave2];
                        slaveNode = nodelist[master][master2];
                        masterIndex = master;
                        slaveIndex = slave;
                    }
                }
            }
        }
        if (min <= param.distanceThreshold) {
            // Add each node to the other's connected-node list.
            masterNode.AddConnectedNode(slaveNode);
            slaveNode.AddConnectedNode(masterNode);
            // Absorb the connected sub node list into the main node list.
            nodelist[masterIndex].AddRange(nodelist[slaveIndex]);
            // Clear the sub list.
            nodelist[slaveIndex].Clear();
            // Mark as merged.
            NotMergedSubDendritesIdx.Remove(slaveIndex);
            sub_dendrites_volume[masterIndex] += sub_dendrites_volume[slaveIndex];
            sub_dendrites_volume[slaveIndex] = 0;
        }
        else {
            // Closest remaining pair is beyond the threshold: stop merging.
            break;
        }
    }
    // Re-root every surviving sub-dendrite at its last edge node.
    foreach (int index in NotMergedSubDendritesIdx) {
        if (nodelist[index].Count > 0) {
            Dendrite node = sub_dendrites[index];
            var edge = node.GetEdgeNodes();
            node.ChangeRootNode(edge.Last());
        }
    }
}