// Builds a dendrogram for every leaf cluster already stored in `leaves` and
// copies the clustering results (level/real distances, reference structure,
// joined children) back onto each leaf, updating progress bookkeeping as it goes.
private void MakeDendrogs(AglomerativeType linkage)
{
    hierarchicalCluster clusterer = new hierarchicalCluster(dMeasure, input, dirName);
    currentV = 0;
    maxV = leaves.Count + 1;
    // Remember the progress level before this phase so it can be restored at the end.
    double progressBase = currentProgress;
    foreach (HClusterNode leaf in leaves)
    {
        // Anchor each sub-clustering on the leaf's first structure.
        clusterer.mustRefStructure = leaf.setStruct[0];
        ClusterOutput result = clusterer.HierarchicalClustering(leaf.setStruct);
        dendrogList.Add(leaf);
        leaf.levelDist = result.hNode.levelDist;
        leaf.realDist = dMeasure.GetRealValue(leaf.levelDist);
        leaf.refStructure = result.hNode.refStructure;
        if (result.hNode.joined != null)
        {
            // Shallow copy of the joined children onto the leaf.
            leaf.joined = new List<HClusterNode>(result.hNode.joined);
        }
        currentV++;
        currentProgress = progressBase + 1.0 / maxProgress * (double)currentV / maxV;
    }
    // Restore progress state for the caller; maxV now reflects processed count.
    maxV = currentV;
    currentProgress = progressBase;
}
// Runs a full hierarchical clustering job: builds the distance measure
// (either from a DCD trajectory or from files in dirName), clusters all
// structures, and forwards the result to UpdateOutput with CPU timing marks.
private void RunHierarchicalCluster(string name, string dirName, string alignFile = null, DCDFile dcd = null)
{
    DateTime cpuPart1 = DateTime.Now;
    //distance.CalcDistMatrix(distance.structNames);
    // opt.hierarchical.atoms = PDB.PDBMODE.ALL_ATOMS;
    // Pick the measure source: trajectory frames when a DCD is supplied,
    // otherwise structure files from the directory.
    DistanceMeasure distance = dcd != null
        ? CreateMeasureForDCD(dcd, opt.hierarchical.distance, opt.hierarchical.atoms,
                              opt.hierarchical.reference1DjuryAglom, opt.hierarchical.alignmentFileName,
                              opt.hierarchical.hammingProfile, opt.hierarchical.jury1DProfileAglom)
        : CreateMeasure(name, dirName, opt.hierarchical.distance, opt.hierarchical.atoms,
                        opt.hierarchical.reference1DjuryAglom, alignFile,
                        opt.hierarchical.hammingProfile, opt.hierarchical.jury1DProfileAglom);
    DebugClass.WriteMessage("Measure Created");

    hierarchicalCluster hk = new hierarchicalCluster(distance, opt.hierarchical, dirName);
    // Notify listeners (if any) that the job has started.
    beginJob?.Invoke(currentProcessName, hk.ToString(), dirName, distance.ToString());
    clType = hk.ToString();
    progressDic.Add(name, hk);
    distance.InitMeasure();

    DateTime cpuPart2 = DateTime.Now;
    ClusterOutput output = hk.HierarchicalClustering(new List<string>(distance.structNames.Keys));
    UpdateOutput(name, dirName, alignFile, output, distance.ToString(), cpuPart1, cpuPart2, hk);
}
// Builds a dendrogram over a reduced set of cluster representatives:
// structures are first grouped into key-based clusters, each cluster is
// represented by its 1D-jury winner, the representatives are clustered
// hierarchically, and finally the full structure lists are re-attached to
// the dendrogram leaves. Progress is reported through currentV/maxV fields.
public ClusterOutput DendrogUsingMeasures(List <string> structures)
{
    jury1D juryLocal = new jury1D();
    juryLocal.PrepareJury(al);
    ClusterOutput outC = null;
    Dictionary <string, List <int> > dic;
    //Console.WriteLine("Start after jury " + Process.GetCurrentProcess().PeakWorkingSet64);
    maxV = refPoints * 20 * 4;
    currentV = 0;
    // Map cluster keys -> indices into `structures`.
    dic = PrepareKeys(structures, false);
    //DebugClass.DebugOn();
    // input.relClusters = input.reqClusters;
    // input.perData = 90;
    // Too many clusters: reduce them with entropy-based combining or Rpart.
    if (dic.Count > input.relClusters)
    {
        if (!input.combine)
        {
            dic = HashEntropyCombine(dic, structures, input.relClusters);
        }
        else
        {
            dic = Rpart(dic, structures, false);
        }
        //dic = FastCombineKeysNew(dic, structures, false);
    }
    Dictionary <string, int> xx = ReadLeafs();
    dic = SelectClusters(xx, dic);
    maxV = 3;
    currentV = 1;
    //Console.WriteLine("Entropy ready after jury " + Process.GetCurrentProcess().PeakWorkingSet64);
    DebugClass.WriteMessage("Entropy ready");
    //Alternative way to start of UQclust Tree must be finished
    //input.relClusters = 10000;
    //dic = FastCombineKeys(dic, structures, true);
    DebugClass.WriteMessage("dic size" + dic.Count);
    currentV++;
    //Console.WriteLine("Combine ready after jury " + Process.GetCurrentProcess().PeakWorkingSet64);
    DebugClass.WriteMessage("Combine Keys ready");
    Dictionary <string, string> translateToCluster = new Dictionary <string, string>(dic.Count);
    List <string> structuresToDendrogram = new List <string>(dic.Count);
    List <string> structuresFullPath = new List <string>(dic.Count);
    DebugClass.WriteMessage("Number of clusters: " + dic.Count);
    int cc = 0;
    // Sort cluster keys: larger clusters first; equal-sized clusters are
    // ordered by the first differing key character, '0' sorting before '1'.
    // NOTE(review): assumes equal-size keys have equal length — verify in PrepareKeys.
    List <string> order = new List <string>(dic.Keys);
    order.Sort(delegate(string a, string b)
    {
        if (dic[b].Count == dic[a].Count)
        {
            for (int i = 0; i < a.Length; i++)
            {
                if (a[i] != b[i])
                {
                    if (a[i] == '0')
                    {
                        return(-1);
                    }
                    else
                    {
                        return(1);
                    }
                }
            }
        }
        return(dic[b].Count.CompareTo(dic[a].Count));
    });
    // Pick one representative per cluster: the 1D-jury winner for clusters
    // of more than two members, otherwise simply the first member.
    foreach (var item in order)
    {
        if (dic[item].Count > 2)
        {
            List <string> cluster = new List <string>(dic[item].Count);
            foreach (var str in dic[item])
            {
                cluster.Add(structures[str]);
            }
            ClusterOutput output = juryLocal.JuryOptWeights(cluster);
            structuresToDendrogram.Add(output.juryLike[0].Key);
            // With no alignment file the representative is addressed by full path.
            if (alignFile == null)
            {
                structuresFullPath.Add(dirName + Path.DirectorySeparatorChar + output.juryLike[0].Key);
            }
            else
            {
                structuresFullPath.Add(output.juryLike[0].Key);
            }
            translateToCluster.Add(output.juryLike[0].Key, item);
        }
        else
        {
            structuresToDendrogram.Add(structures[dic[item][0]]);
            if (alignFile == null)
            {
                structuresFullPath.Add(dirName + Path.DirectorySeparatorChar + structures[dic[item][0]]);
            }
            else
            {
                structuresFullPath.Add(structures[dic[item][0]]);
            }
            translateToCluster.Add(structures[dic[item][0]], item);
        }
        cc++;
    }
    currentV++;
    DebugClass.WriteMessage("Jury finished");
    // Instantiate the distance measure for the representatives.
    switch (dMeasure)
    {
        case DistanceMeasures.HAMMING:
            if (refJuryProfile == null || !jury1d)
            {
                throw new Exception("Sorry but for jury measure you have to define 1djury profile to find reference structure");
            }
            else
            {
                dist = new JuryDistance(structuresFullPath, alignFile, true, profileName, refJuryProfile);
            }
            break;

        case DistanceMeasures.COSINE:
            dist = new CosineDistance(structuresFullPath, alignFile, jury1d, profileName, refJuryProfile);
            break;

        case DistanceMeasures.RMSD:
            dist = new Rmsd(structuresFullPath, "", jury1d, atoms, refJuryProfile);
            break;

        case DistanceMeasures.MAXSUB:
            dist = new MaxSub(structuresFullPath, "", jury1d, refJuryProfile);
            break;
    }
    // return new ClusterOutput();
    DebugClass.WriteMessage("Start hierarchical");
    //Console.WriteLine("Start hierarchical " + Process.GetCurrentProcess().PeakWorkingSet64);
    currentV = maxV;
    hk = new hierarchicalCluster(dist, hier, dirName);
    dist.InitMeasure();
    //Now just add strctures to the leaves
    outC = hk.HierarchicalClustering(structuresToDendrogram);
    DebugClass.WriteMessage("Stop hierarchical");
    // Re-attach every clustered structure to the leaf holding its representative.
    List <HClusterNode> hLeaves = outC.hNode.GetLeaves();
    foreach (var item in hLeaves)
    {
        if (translateToCluster.ContainsKey(item.setStruct[0]))
        {
            foreach (var str in dic[translateToCluster[item.setStruct[0]]])
            {
                // Skip the representative itself — it is already in setStruct.
                if (item.setStruct[0] != structures[str])
                {
                    item.setStruct.Add(structures[str]);
                }
            }
            item.consistency = CalcClusterConsistency(item.setStruct);
        }
        else
        {
            throw new Exception("Cannot add structure. Something is wrong");
        }
    }
    outC.hNode.RedoSetStructures();
    outC.runParameters = hier.GetVitalParameters();
    outC.runParameters += input.GetVitalParameters();
    return(outC);
}