private void Run1DJury(string name, string dirName, string alignFile = null, DCDFile dcd = null)
{
    DateTime cpuPart1 = DateTime.Now;
    ClusterOutput output;
    jury1D ju = new jury1D();
    if (beginJob != null)
        beginJob(currentProcessName, ju.ToString(), dirName, "NONE");

    progressDic.Add(name, ju);
    if (opt.other.alignGenerate)
        opt.other.alignFileName = "";

    // Prepare the jury from whichever input source was provided:
    // an alignment file, a DCD trajectory, or a directory of models.
    if (alignFile != null)
        ju.PrepareJury(alignFile, opt.other.juryProfile);
    else if (dcd != null)
        ju.PrepareJury(dcd, alignFile, opt.other.juryProfile);
    else
        ju.PrepareJury(dirName, alignFile, opt.other.juryProfile);

    clType = ju.ToString();
    DateTime cpuPart2 = DateTime.Now;

    if (ju.alignKeys != null)
    {
        output = ju.JuryOptWeights(ju.alignKeys);
    }
    else
    {
        UpadateJobInfo(name, true, false);
        throw new Exception("Alignment is empty! Check errors");
    }
    UpdateOutput(name, dirName, alignFile, output, ju.ToString(), cpuPart1, cpuPart2, ju);
}
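// A minimal sketch (not part of the original pipeline) of the jury1D calls
// used above: prepare the jury from a directory of models and return the
// best-ranked one. The method name and the bare directory/profile arguments
// are illustrative assumptions.
private static string PickJuryReference(string dirName, string profileName)
{
    jury1D jury = new jury1D();
    // Build the jury state from the models in dirName (no alignment file).
    jury.PrepareJury(dirName, null, profileName);
    // juryLike comes back ranked, consensus-best model first.
    ClusterOutput output = jury.JuryOptWeights(jury.alignKeys);
    return output.juryLike[0].Key;
}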
List<KeyValuePair<string, double>> CLusterRepresentJury(string dirName, List<string> targets, string profileName)
{
    // Build full paths for the cluster members and rank them with a 1D jury.
    List<string> fileNames = new List<string>(targets.Count);
    foreach (var item in targets)
        fileNames.Add(dirName + Path.DirectorySeparatorChar + item);

    jury1D jury = new jury1D();
    jury.PrepareJury(fileNames, null, profileName);
    ClusterOutput opt = jury.JuryOptWeights(targets);
    return opt.juryLike;
}
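// Hedged usage sketch for CLusterRepresentJury: rank one cluster's models
// and print the top hit. The directory, file names, and profile path are
// illustrative placeholders, not values from the original code.
void PrintClusterRepresentative()
{
    List<string> targets = new List<string> { "model1.pdb", "model2.pdb", "model3.pdb" };
    List<KeyValuePair<string, double>> ranked =
        CLusterRepresentJury("clusters" + Path.DirectorySeparatorChar + "c01", targets, "jury.profile");
    // ranked[0] holds the jury's preferred representative and its score.
    Console.WriteLine(ranked[0].Key + " " + ranked[0].Value);
}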
public ClusterOutput RunHTree()
{
    HClusterNode root = null;
    ClusterOutput outClust = hCluster.RunHashCluster();
    Dictionary<string, List<int>> clusters = hCluster.dicFinal;
    juryLocal.PrepareJury(hCluster.al);
    ClusterOutput output;

    // Build the ground level: one node per hash cluster, with the jury's
    // best-ranked model as the reference structure.
    List<HClusterNode> groundLevel = new List<HClusterNode>();
    foreach (var item in clusters)
    {
        HClusterNode aux = new HClusterNode();
        aux.parent = null;
        aux.joined = null;
        aux.setStruct = new List<string>(item.Value.Count + 1);
        foreach (var index in item.Value)
            aux.setStruct.Add(hCluster.structNames[index]);

        output = juryLocal.JuryOptWeights(aux.setStruct);
        aux.stateFreq = juryLocal.columns;
        aux.refStructure = output.juryLike[0].Key;
        aux.realDist = 0;
        aux.levelDist = 0;
        aux.dirName = item.Key;
        aux.consistency = hCluster.CalcClusterConsistency(aux.setStruct);
        groundLevel.Add(aux);
    }

    currentV = 20;
    int size = groundLevel[0].dirName.Length;
    double step = 80.0 / size;
    // Merge nodes level by level: at each step, nodes whose keys agree after
    // dropping the last character are joined under a common parent.
    for (int i = 1; i < size; i++)
    {
        Dictionary<string, List<HClusterNode>> join = new Dictionary<string, List<HClusterNode>>();
        foreach (var item in groundLevel)
        {
            string key = item.dirName.Remove(item.dirName.Length - 1);
            if (join.ContainsKey(key))
            {
                join[key].Add(item);
            }
            else
            {
                List<HClusterNode> aux = new List<HClusterNode>();
                aux.Add(item);
                join.Add(key, aux);
            }
        }
        groundLevel = new List<HClusterNode>();
        foreach (var item in join)
        {
            HClusterNode aux = JoinNodes(item.Value);
            aux.levelDist = aux.levelNum = i;
            aux.realDist = i;
            aux.dirName = item.Key;
            groundLevel.Add(aux);
        }
        currentV += (int)step;
    }

    if (groundLevel.Count == 1)
    {
        root = groundLevel[0];
    }
    else
    {
        root = JoinNodes(groundLevel);
        root.levelDist = size;
        root.realDist = size;
    }
    currentV = 100;

    ClusterOutput outHTree = new ClusterOutput();
    outHTree.hNode = root;
    return outHTree;
}
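// Standalone illustration of the prefix-join step in RunHTree, under the
// assumption that dirName keys encode the hash-cluster path one character
// per level: nodes whose keys match after dropping the last character land
// in the same bucket and are then merged by JoinNodes. The helper name is
// illustrative only.
private static Dictionary<string, List<HClusterNode>> GroupByShortenedKey(List<HClusterNode> level)
{
    Dictionary<string, List<HClusterNode>> join = new Dictionary<string, List<HClusterNode>>();
    foreach (HClusterNode node in level)
    {
        string key = node.dirName.Remove(node.dirName.Length - 1);
        if (!join.ContainsKey(key))
            join.Add(key, new List<HClusterNode>());
        join[key].Add(node);
    }
    return join;
}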
public ClusterOutput DendrogUsingMeasures(List<string> structures)
{
    jury1D juryLocal = new jury1D();
    juryLocal.PrepareJury(al);
    ClusterOutput outC = null;
    Dictionary<string, List<int>> dic;

    maxV = refPoints * 20 * 4;
    currentV = 0;
    dic = PrepareKeys(structures, false);

    // Reduce the number of clusters to the requested size.
    if (dic.Count > input.relClusters)
    {
        if (!input.combine)
            dic = HashEntropyCombine(dic, structures, input.relClusters);
        else
            dic = Rpart(dic, structures, false);
    }
    Dictionary<string, int> xx = ReadLeafs();
    dic = SelectClusters(xx, dic);
    maxV = 3;
    currentV = 1;
    DebugClass.WriteMessage("Entropy ready");
    // Alternative way to start the uQlust tree; must be finished:
    //input.relClusters = 10000;
    //dic = FastCombineKeys(dic, structures, true);
    DebugClass.WriteMessage("dic size" + dic.Count);
    currentV++;
    DebugClass.WriteMessage("Combine Keys ready");

    Dictionary<string, string> translateToCluster = new Dictionary<string, string>(dic.Count);
    List<string> structuresToDendrogram = new List<string>(dic.Count);
    List<string> structuresFullPath = new List<string>(dic.Count);
    DebugClass.WriteMessage("Number of clusters: " + dic.Count);
    int cc = 0;

    // Order clusters by descending size; ties are broken by comparing the
    // cluster keys position by position, '0' before '1'.
    List<string> order = new List<string>(dic.Keys);
    order.Sort(delegate(string a, string b)
    {
        if (dic[b].Count == dic[a].Count)
        {
            for (int i = 0; i < a.Length; i++)
            {
                if (a[i] != b[i])
                    return a[i] == '0' ? -1 : 1;
            }
        }
        return dic[b].Count.CompareTo(dic[a].Count);
    });

    // For each cluster pick a representative: the jury winner for clusters
    // of more than two structures, otherwise the first structure.
    foreach (var item in order)
    {
        if (dic[item].Count > 2)
        {
            List<string> cluster = new List<string>(dic[item].Count);
            foreach (var str in dic[item])
                cluster.Add(structures[str]);

            ClusterOutput output = juryLocal.JuryOptWeights(cluster);
            structuresToDendrogram.Add(output.juryLike[0].Key);
            if (alignFile == null)
                structuresFullPath.Add(dirName + Path.DirectorySeparatorChar + output.juryLike[0].Key);
            else
                structuresFullPath.Add(output.juryLike[0].Key);
            translateToCluster.Add(output.juryLike[0].Key, item);
        }
        else
        {
            structuresToDendrogram.Add(structures[dic[item][0]]);
            if (alignFile == null)
                structuresFullPath.Add(dirName + Path.DirectorySeparatorChar + structures[dic[item][0]]);
            else
                structuresFullPath.Add(structures[dic[item][0]]);
            translateToCluster.Add(structures[dic[item][0]], item);
        }
        cc++;
    }
    currentV++;
    DebugClass.WriteMessage("Jury finished");

    // Choose the distance measure for the dendrogram step.
    switch (dMeasure)
    {
        case DistanceMeasures.HAMMING:
            if (refJuryProfile == null || !jury1d)
                throw new Exception("Sorry, but for the jury measure you have to define a 1D-jury profile to find the reference structure");
            dist = new JuryDistance(structuresFullPath, alignFile, true, profileName, refJuryProfile);
            break;
        case DistanceMeasures.COSINE:
            dist = new CosineDistance(structuresFullPath, alignFile, jury1d, profileName, refJuryProfile);
            break;
        case DistanceMeasures.RMSD:
            dist = new Rmsd(structuresFullPath, "", jury1d, atoms, refJuryProfile);
            break;
        case DistanceMeasures.MAXSUB:
            dist = new MaxSub(structuresFullPath, "", jury1d, refJuryProfile);
            break;
    }
    DebugClass.WriteMessage("Start hierarchical");
    currentV = maxV;
    hk = new hierarchicalCluster(dist, hier, dirName);
    dist.InitMeasure();
    // Cluster only the representatives; the remaining structures are added
    // back to the leaves below.
    outC = hk.HierarchicalClustering(structuresToDendrogram);
    DebugClass.WriteMessage("Stop hierarchical");

    List<HClusterNode> hLeaves = outC.hNode.GetLeaves();
    foreach (var item in hLeaves)
    {
        if (translateToCluster.ContainsKey(item.setStruct[0]))
        {
            foreach (var str in dic[translateToCluster[item.setStruct[0]]])
            {
                if (item.setStruct[0] != structures[str])
                    item.setStruct.Add(structures[str]);
            }
            item.consistency = CalcClusterConsistency(item.setStruct);
        }
        else
        {
            throw new Exception("Cannot add structure. Something is wrong");
        }
    }
    outC.hNode.RedoSetStructures();
    outC.runParameters = hier.GetVitalParameters();
    outC.runParameters += input.GetVitalParameters();
    return outC;
}
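// The ordering used in DendrogUsingMeasures, extracted as a standalone
// comparison for clarity (a sketch; assumes two keys of equal length, as
// produced by PrepareKeys): larger clusters sort first, ties are resolved
// by the first differing key character, with '0' winning.
private static int CompareClusterKeys(Dictionary<string, List<int>> dic, string a, string b)
{
    if (dic[a].Count == dic[b].Count)
    {
        for (int i = 0; i < a.Length; i++)
            if (a[i] != b[i])
                return a[i] == '0' ? -1 : 1;
    }
    return dic[b].Count.CompareTo(dic[a].Count);
}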
public ClusterOutput Run(List<string> structs)
{
    maxProgress = 5;
    currentProgress = 0;
    if (hConcensus)
    {
        maxProgress++;
        consensus = new HammingConsensus(dMeasure.dirName, null, false, input.consensusProfile);
        progressObject = consensus;
        consensus.InitMeasure();
        currentProgress += 1.0 / maxProgress;
    }
    jury = new jury1D();
    progressObject = jury;
    currentProgress += 1.0 / maxProgress;
    progressObject = null;
    jury.PrepareJury(dMeasure.dirName, dMeasure.alignFile, input.jury1DProfileFast);
    currentProgress += 1.0 / maxProgress;

    ClusterOutput clOut = new ClusterOutput();
    root.setStruct = structs;
    FastCluster(root);
    maxV = initNodesNum;
    // Keep splitting the largest pending node until the requested number
    // of initial nodes is reached.
    while (st.Count > 0 && (leaves.Count + st.Count) < initNodesNum)
    {
        st.Sort(delegate(HClusterNode p1, HClusterNode p2)
        {
            return p2.setStruct.Count.CompareTo(p1.setStruct.Count);
        });
        HClusterNode node = st[0];
        st.RemoveAt(0);
        FastCluster(node);
        currentV += leaves.Count + st.Count;
    }
    currentV = maxV;
    currentProgress += 1.0 / maxProgress;

    // Whatever remains unsplit becomes a leaf.
    while (st.Count > 0)
    {
        HClusterNode node = st[0];
        st.RemoveAt(0);
        leaves.Add(node);
    }
    MakeDendrogs(linkage);
    currentProgress += 1.0 / maxProgress;
    PrepareList();
    root = ConnectDendrogs(linkage);
    root.levelDist = root.SearchMaxDist();
    root.realDist = dMeasure.GetRealValue(root.levelDist);
    clOut.hNode = root;
    currentProgress += 1.0 / maxProgress;
    return clOut;
}
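// Sketch of the largest-first expansion policy in Run (illustrative only;
// the split delegate stands in for FastCluster, which in Run pushes new
// sub-nodes onto the pending list st and finished nodes onto leaves):
private static void ExpandLargestFirst(List<HClusterNode> pending, List<HClusterNode> leaves, int targetNodes, Action<HClusterNode> split)
{
    while (pending.Count > 0 && leaves.Count + pending.Count < targetNodes)
    {
        // Always split the node holding the most structures.
        pending.Sort((p1, p2) => p2.setStruct.Count.CompareTo(p1.setStruct.Count));
        HClusterNode node = pending[0];
        pending.RemoveAt(0);
        split(node);
    }
    // Anything still pending becomes a leaf.
    leaves.AddRange(pending);
    pending.Clear();
}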