public void TestServer()
{
    // Build a minimal ServerDef - a cluster containing a single, empty job -
    // and round-trip it through protobuf serialization into a byte array.
    var serverDef = new Tensorflow.ServerDef();
    var clusterDef = new ClusterDef();
    clusterDef.Job.Add(new JobDef());
    serverDef.Cluster = clusterDef;

    byte[] pbuff;
    using (var ms = new MemoryStream())
    {
        serverDef.WriteTo(ms);
        pbuff = ms.ToArray();
    }

    // Disabled: would start a server from the serialized definition.
    /*
     * using (Emgu.TF.Server s = new Emgu.TF.Server(pbuff))
     * {
     *     String target = s.Target;
     * }*/
}
public SimCluster(ClusterDef cluster, SimRuntime runtime)
{
    Runtime = runtime;
    Rand = Runtime.Rand;

    // Each route in the cluster definition is registered as its own network
    // service, with a dedicated scheduler.
    foreach (var route in cluster.Routes)
    {
        var id = route.Key;
        var service = new ServiceId($"network:{id.Source}->{id.Destinaton}");
        var scheduler = new SimScheduler(Runtime, service);
        Routes.Add(id, new SimRoute(scheduler, this, id, route.Value));
    }

    // Group the service definitions by machine, build one SimMachine per
    // group, and install every service onto its machine.
    foreach (var group in cluster.Services.GroupBy(i => i.Key.Machine))
    {
        var machine = new SimMachine(group.Key, runtime, this);
        foreach (var svc in group)
        {
            machine.Install(svc.Key, svc.Value);
        }
        Machines.Add(group.Key, machine);
    }
}
internal bool CheckResult(VariableDef[] variableDefs, ClusterDef[] clusterDefs, ConstraintDef[] constraintDefsX, ConstraintDef[] constraintDefsY, double[] expectedPositionsX, double[] expectedPositionsY, bool checkResults)
{
    // Stamp each variable with its expected X/Y position, then defer to the
    // overload that runs the verification proper.
    for (int idx = 0; idx < variableDefs.Length; ++idx)
    {
        variableDefs[idx].SetExpected((uint)idx, expectedPositionsX[idx], expectedPositionsY[idx]);
    }
    return CheckResult(variableDefs, clusterDefs, constraintDefsX, constraintDefsY, checkResults);
}
/// <summary>
/// Verifies that no non-empty cluster overlaps a node (variable) of the same hierarchy
/// unless that cluster is an ancestor of the node.  Each violation is logged via
/// WriteLine and <paramref name="succeeded"/> is set to false.
/// </summary>
/// <param name="epsilon">Tolerance for rounding error in position comparisons.</param>
/// <param name="kvpCurHier">Current hierarchy: Key is its clusters, Value its variables.</param>
/// <param name="localClusDefs">Clusters of the current hierarchy.</param>
/// <param name="succeeded">Set to false (never true) when an overlap is found.</param>
private void VerifyClustersDoNotOverlapWithNonChildNodesInTheirOwnHierarchy(double epsilon, KeyValuePair<List<ClusterDef>, List<VariableDef>> kvpCurHier, ClusterDef[] localClusDefs, ref bool succeeded)
{
    // idxStartVar carries over between clusters: variables known to end above the
    // current cluster are skipped for later clusters too.  NOTE(review): this
    // carry-over assumes localClusDefs is sorted by Top (the sibling-cluster
    // verification method produces it that way via OrderBy) - confirm if callers change.
    int idxStartVar = 0;
    foreach (ClusterDef clusCur in localClusDefs)
    {
        if (clusCur.IsEmpty)
        {
            continue;
        }

        // Variables of this hierarchy sorted by top edge, so the loop below can
        // break as soon as a variable starts past the cluster's bottom.
        VariableDef[] localVarDefs = kvpCurHier.Value.OrderBy(varDef => varDef.Top).ToArray();
        for (int jj = idxStartVar; jj < localVarDefs.Length; ++jj)
        {
            VariableDef varCheck = localVarDefs[jj];

            // Minimize variable-list traversal.
            if (varCheck.Top < (clusCur.Top - epsilon))
            {
                idxStartVar = jj;
            }

            // If the variable ends before the cluster starts, there's no overlap.
            if ((clusCur.Top - varCheck.Bottom - this.MinPaddingY) > -epsilon)
            {
                continue;
            }

            // Rounding error may leave these calculations slightly greater or less than zero.
            // Since margin is calculated only for inner edges and here we are testing for
            // sibling rather than nested nodes, we don't use margin here.
            // Name is <relativeToVarCur><RelativeToVarCheck>
            double bottomTopOverlap = varCheck.Top - clusCur.Bottom - this.MinPaddingY;
            if (bottomTopOverlap >= -epsilon)
            {
                // Out of range of clusCur's size, so we're done with clusCur.
                break;
            }

            // Does varCheck's left or right border overlap?  Negative overlap means yes.
            // Again, margins are only cluster-internal and we're testing external boundaries
            // here; so the cluster size should have been calculated large enough and we only
            // look at padding.
            double xa = varCheck.Left - clusCur.Right - this.MinPaddingX;
            double xb = clusCur.Left - varCheck.Right - this.MinPaddingX;
            if ((xa < -epsilon) && (xb < -epsilon))
            {
                // Let's see if it's an ancestor.  A node legitimately overlaps any cluster
                // on its parent chain; only overlap with a non-ancestor cluster is an error.
                bool hasSideOverlap = true;
                foreach (ClusterDef clusDefParent in varCheck.ParentClusters)
                {
                    for (ClusterDef clusDefAncestor = clusDefParent; null != clusDefAncestor; clusDefAncestor = clusDefAncestor.ParentClusterDef)
                    {
                        if (clusDefAncestor == clusCur)
                        {
                            hasSideOverlap = false;
                            break;
                        }
                    }
                    if (!hasSideOverlap)
                    {
                        break;
                    }
                }
                if (hasSideOverlap)
                {
                    // Uh oh.
                    this.WriteLine("Error {0}: Overlap exists between Cluster '{1}' and non-child Node '{2}'",
                            FailTag("OlapClusNode"), clusCur.ClusterId, varCheck.IdString);
                    this.WriteLine(" Cluster {0}: L/R T/B {1:F5}/{2:F5} {3:F5}/{4:F5}",
                            clusCur.ClusterId, clusCur.Left, clusCur.Right, clusCur.Top, clusCur.Bottom);
                    this.WriteLine(" Node {0}: L/R T/B {1:F5}/{2:F5} {3:F5}/{4:F5}",
                            varCheck.IdString, varCheck.Left, varCheck.Right, varCheck.Top, varCheck.Bottom);
                    succeeded = false;
                }
            } // endif overlap within epsilon
        } // endfor each non-child variable
    }
}
/// <summary>
/// Verifies that non-empty clusters in the same hierarchy do not overlap each other,
/// unless the overlapped cluster is an ancestor of the overlapping one.  Each violation
/// is logged via WriteLine and <paramref name="succeeded"/> is set to false.
/// Also returns (via out) this hierarchy's clusters sorted by top edge, for reuse by
/// the cluster-vs-node verification.
/// </summary>
/// <param name="epsilon">Tolerance for rounding error in position comparisons.</param>
/// <param name="localClusDefs">Out: the hierarchy's clusters ordered by Top.</param>
/// <param name="kvpCurHier">Current hierarchy: Key is its clusters, Value its variables.</param>
/// <param name="succeeded">Set to false (never true) when an overlap is found.</param>
private void VerifyClustersDoNotOverlapWithNonParentClustersInTheirOwnHierarchy(double epsilon, out ClusterDef[] localClusDefs, KeyValuePair<List<ClusterDef>, List<VariableDef>> kvpCurHier, ref bool succeeded)
{
    // Sort by top edge so the inner loop can break as soon as a candidate starts
    // past the current cluster's bottom.
    localClusDefs = kvpCurHier.Key.OrderBy(clusDef => clusDef.Top).ToArray();
    for (int ii = 0; ii < localClusDefs.Length; ++ii)
    {
        ClusterDef clusCur = localClusDefs[ii];
        if (clusCur.IsEmpty || clusCur.IsNewHierarchy)
        {
            continue;
        }
        for (int jj = ii + 1; jj < localClusDefs.Length; ++jj)
        {
            ClusterDef clusCheck = localClusDefs[jj];
            if (clusCheck.IsEmpty || clusCheck.IsNewHierarchy)
            {
                continue;
            }

            // Rounding error may leave these calculations slightly greater or less than zero.
            // Since margin is calculated only for inner edges and here we are testing for
            // sibling rather than nested nodes, we don't use margin here.
            // Name is <relativeToVarCur><RelativeToVarCheck>
            double bottomTopOverlap = clusCheck.Top - clusCur.Bottom - this.MinPaddingY;
            if (bottomTopOverlap >= -epsilon)
            {
                // Out of range of clusCur's size, so we're done with clusCur.
                break;
            }

            // Does clusCheck's left or right border overlap?  Negative overlap means yes.
            // Again, margins are only cluster-internal and we're testing external boundaries
            // here; so the cluster size should have been calculated large enough and we only
            // look at padding.
            double xa = clusCheck.Left - clusCur.Right - this.MinPaddingX;
            double xb = clusCur.Left - clusCheck.Right - this.MinPaddingX;
            if ((xa < -epsilon) && (xb < -epsilon))
            {
                // Let's see if it's a parent.  Walk clusCheck's ancestor chain; overlap
                // with an ancestor is expected, not an error.
                bool hasSideOverlap = true;
                for (ClusterDef clusDefParent = clusCheck.ParentClusterDef; null != clusDefParent; clusDefParent = clusDefParent.ParentClusterDef)
                {
                    if (clusDefParent == clusCur)
                    {
                        hasSideOverlap = false;
                        break;
                    }
                }

                // Note: This test may fail if clusCheck is a parent of clusCur, but in that case
                // clusCheck should be outside clusCur - which means we had another error before this,
                // that cluster {clusCheck} is outside the bounds of parent cluster {clusCur}.
                if (hasSideOverlap)
                {
                    // Uh oh.
                    this.WriteLine("Error {0}: Overlap exists between sibling Clusters '{1}' and '{2}'",
                            FailTag("OlapSibClus"), clusCur.ClusterId, clusCheck.ClusterId);
                    this.WriteLine(" Cluster {0}: L/R T/B {1:F5}/{2:F5} {3:F5}/{4:F5}",
                            clusCur.ClusterId, clusCur.Left, clusCur.Right, clusCur.Top, clusCur.Bottom);
                    this.WriteLine(" Cluster {0}: L/R T/B {1:F5}/{2:F5} {3:F5}/{4:F5}",
                            clusCheck.ClusterId, clusCheck.Left, clusCheck.Right, clusCheck.Top, clusCheck.Bottom);
                    succeeded = false;
                }
            } // endif overlap within epsilon
        } // endfor localClusDefs[jj]
    }
}
public SimRuntime(ClusterDef def)
{
    Def = def;

    // All simulation work funnels through one scheduler/factory pair bound to
    // the "simulation:proc" service.
    var procService = new ServiceId("simulation:proc");
    var scheduler = new SimScheduler(this, procService);
    _scheduler = scheduler;
    _factory = new TaskFactory(scheduler);
}
// Note: cConstraintsPerVar is an artifact of ProjectionSolver testing and is ignored
// in OverlapRemoval since we generate only the necessary constraints for removing overlaps (it
// could be extended to generate additional overlaps but that has not yet been done).
/// <summary>
/// Generates a random set of variable (node) definitions and, if MaxClusters > 0, a random
/// cluster hierarchy over them, then writes the result to a test data file.
/// </summary>
/// <param name="cVars">Number of variables to generate; must be greater than zero.</param>
/// <param name="cConstraintsPerVar">Ignored here (see note above), but must be nonzero.</param>
/// <param name="strOutFile">Name of the test file to write.</param>
/// <returns>The result of WriteTestFile for the generated definitions.</returns>
public bool CreateFile(uint cVars, uint cConstraintsPerVar, string strOutFile)
{
    Validate.IsTrue(cVars > 0, "Test file creation requires cVars > 0");
    Validate.AreNotEqual((uint)0, cConstraintsPerVar, "Test file creation requires cConstraintsPerVar > 0");

    // Generate a new set of Variable definitions.
    var lstVarDefs = new List<VariableDef>();
    Random rng = TestConstraints.NewRng();

    // Print this so that in case of errors we can re-run with this seed.
    Console.WriteLine("Creating test file with seed 0x{0}", TestConstraints.RandomSeed.ToString("X"));

    //
    // This code was adapted from satisfy_inc for ProjSolver, and then the second dimension
    // and sizes were added for OverlapRemoval, as well as other extensions.
    //
    for (int idxVar = 0; idxVar < cVars; ++idxVar)
    {
        double dblPosX = TestConstraints.RoundRand(rng, TestConstraints.MaxNodePosition);
        double dblPosY = TestConstraints.RoundRand(rng, TestConstraints.MaxNodePosition);

        // Ensure nonzero sizes.
        double dblSizeX = TestConstraints.RoundRand(rng, MaxSize) + 1.0;
        double dblSizeY = TestConstraints.RoundRand(rng, MaxSize) + 1.0;

        double dblWeightX = 1.0, dblWeightY = 1.0;
        if (TestConstraints.MaxWeightToGenerate > 0.0)
        {
            // Ensure nonzero weights.
            dblWeightX = TestConstraints.RoundRand(rng, TestConstraints.MaxWeightToGenerate) + 0.01;
            dblWeightY = TestConstraints.RoundRand(rng, TestConstraints.MaxWeightToGenerate) + 0.01;
        }
        lstVarDefs.Add(new VariableDef((uint)idxVar
                        , dblPosX, dblPosY
                        , dblSizeX, dblSizeY
                        , dblWeightX, dblWeightY));
    } // endfor idxVar

    List<ClusterDef> lstClusDefs = null;
    if (MaxClusters > 0)
    {
        // If we are generating a random number of clusters, get that number here.
        int cClusters = MaxClusters;
        if (WantRandomClusters)
        {
            cClusters = rng.Next(cClusters);
        }

        // Add the first cluster, at the root level - hence no parent and no borders.
        // No BorderInfo needed for root clusters - it's ignored.  RoundRand returns
        // 0 if its arg is 0.
        lstClusDefs = new List<ClusterDef>(cClusters)
            {
                new ClusterDef(
                        TestConstraints.RoundRand(rng, MinClusterSizeX),
                        TestConstraints.RoundRand(rng, MinClusterSizeY)) { IsNewHierarchy = true }
            };
        if (TestGlobals.VerboseLevel >= 3)
        {
            Console.WriteLine("Level-1 cluster: {0}", lstClusDefs[0].ClusterId);
        }

        // If we are doing a single hierarchy only, restrict the range to the current set of parents,
        // otherwise give ourselves about a 10% chance of being at the root level instead of being nested.
        int cRootExtra = WantSingleClusterRoot ? 0 : Math.Max(cClusters / 10, 1);

        // Create the clusters, randomly selecting a parent for each from the items previously
        // put in the list.
        for (int idxNewClus = 1; idxNewClus < cClusters; ++idxNewClus)
        {
            int idxParentClus = rng.Next(lstClusDefs.Count + cRootExtra);  // Allow out-of-bounds index as "root level" flag

            // Margin stuff stays 0 if MaxMargin is 0 (and border stuff is ignored if it's a root cluster).
            var clusNew = new ClusterDef(TestConstraints.RoundRand(rng, MinClusterSizeX),
                            TestConstraints.RoundRand(rng, MinClusterSizeY),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedLeftBorder),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedRightBorder),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedTopBorder),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedBottomBorder));
            lstClusDefs.Add(clusNew);

            // If we are doing a single hierarchy only, restrict the range to the current set of parents.
            // Otherwise, if the parent index is >= the index we're adding now, that's our way of
            // selecting the node to be at the root level.
            if (WantSingleClusterRoot)
            {
                idxParentClus %= idxNewClus;
            }
            if (idxParentClus < idxNewClus)
            {
                ClusterDef clusParent = lstClusDefs[idxParentClus];
                clusParent.AddClusterDef(clusNew);
                if (TestGlobals.VerboseLevel >= 3)
                {
                    // Print the new cluster followed by its ancestor chain.
                    Console.Write("Nested cluster: {0}", clusNew.ClusterId);
                    for (; null != clusParent; clusParent = clusParent.ParentClusterDef)
                    {
                        Console.Write(" {0}", clusParent.ClusterId);
                    }
                    Console.WriteLine();
                }
            }
            else
            {
                // Create a simple cluster since root clusters don't honor borders.
                clusNew.IsNewHierarchy = true;
                if (TestGlobals.VerboseLevel >= 3)
                {
                    Console.WriteLine("Level-1 cluster: {0}", clusNew.ClusterId);
                }
            }
        }

        // Now run through the nodes and randomly assign them into the clusters.
        foreach (VariableDef varDef in lstVarDefs)
        {
            int idxParentClus = rng.Next(lstClusDefs.Count + cRootExtra);
            if (idxParentClus < lstClusDefs.Count)
            {
                // Don't write the ones at the root level.
                lstClusDefs[idxParentClus].AddVariableDef(varDef);
            }
            else if (TestGlobals.VerboseLevel >= 3)
            {
                Console.WriteLine("Root var: {0}", varDef.IdString);
            }
        }
    } // endif MaxClusters > 0

    return (WriteTestFile(lstVarDefs, lstClusDefs, strOutFile));
}
/// <summary>
/// Processes one line of a CLUSTER section of a test file, advancing a small state machine
/// (currentClusterState) through the expected line order: Id, Parent, the four borders
/// (with optional MinSize / NewHierarchy lines before the Left border), then Variables
/// until END CLUSTER.
/// </summary>
/// <param name="lineNumber">1-based line number, used only for error messages.</param>
/// <param name="currentLine">The raw line being parsed.</param>
/// <param name="currentSection">The section the reader is currently in.</param>
/// <param name="currentClusterState">In/out: which cluster field is expected next.</param>
/// <returns>The (possibly updated) section to continue reading in.</returns>
private ESection ProcessCluster(int lineNumber, string currentLine, ESection currentSection, ref EClusterState currentClusterState)
{
    // END CLUSTER is only legal once we've reached the Variable state (all headers seen).
    if (currentLine.StartsWith(TestFileStrings.EndCluster, StringComparison.OrdinalIgnoreCase))
    {
        if (EClusterState.Variable != currentClusterState)
        {
            Validate.Fail(string.Format("Unexpected END CLUSTER line {0}: {1}", lineNumber, currentLine));
        }
        currentSection = ESection.PreClusterOrConstraints;
        this.ClusterDefs.Add(this.currentClusterDef);
        this.currentClusterDef = null;
        return currentSection;
    }
    // NOTE(review): the int.Parse/double.Parse calls below use the current culture;
    // if test files always use '.' decimals, CultureInfo.InvariantCulture would be
    // safer - confirm against the file writer before changing.
    if (EClusterState.Id == currentClusterState)
    {
        Match m = TestFileStrings.ParseClusterId.Match(currentLine);
        if (m.Success)
        {
            // Verify the Clusters in the file are sorted on ID.  This makes it easier for the results
            // reading to be in sync, as we'll index ClusterDefs by [Parent - 1].
            var id = int.Parse(m.Groups["id"].ToString());
            Validate.IsTrue(this.currentClusterDef.ClusterId == id, "Out of order CLUSTER id");
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER ID line {0}: {1}", lineNumber, currentLine));
        }
        currentClusterState = EClusterState.Parent;
    }
    else if (EClusterState.Parent == currentClusterState)
    {
        Match m = TestFileStrings.ParseClusterParent.Match(currentLine);
        if (m.Success)
        {
            int parentId = int.Parse(m.Groups["parent"].ToString());

            // Cluster IDs are 1-based because we use 0 for the "root cluster".
            if (0 != parentId)
            {
                ClusterDef clusParent = this.ClusterDefs[parentId - 1];
                Validate.AreEqual(clusParent.ClusterId, parentId, "clusParent.ClusterId mismatch with idParent");
                clusParent.AddClusterDef(this.currentClusterDef);
            }
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER Parent line {0}: {1}", lineNumber, currentLine));
        }
        currentClusterState = EClusterState.LeftBorder;
    }
    else if (EClusterState.LeftBorder == currentClusterState)
    {
        // Older files didn't have MinSize.  MinSize and NewHierarchy are optional lines
        // that may appear before the Left border; they do not advance the state.
        Match m = TestFileStrings.ParseClusterMinSize.Match(currentLine);
        if (m.Success)
        {
            this.currentClusterDef.MinimumSizeX = double.Parse(m.Groups["X"].ToString());
            this.currentClusterDef.MinimumSizeY = double.Parse(m.Groups["Y"].ToString());
            return currentSection;
        }
        if (0 == string.Compare("NewHierarchy", currentLine, StringComparison.OrdinalIgnoreCase))
        {
            // NewHierarchy is optional.
            this.currentClusterDef.IsNewHierarchy = true;
            return currentSection;
        }
        this.currentClusterDef.LeftBorderInfo = ParseBorderInfo("Left", currentLine, lineNumber);
        currentClusterState = EClusterState.RightBorder;
    }
    else if (EClusterState.RightBorder == currentClusterState)
    {
        this.currentClusterDef.RightBorderInfo = ParseBorderInfo("Right", currentLine, lineNumber);
        currentClusterState = EClusterState.TopBorder;
    }
    else if (EClusterState.TopBorder == currentClusterState)
    {
        this.currentClusterDef.TopBorderInfo = ParseBorderInfo("Top", currentLine, lineNumber);
        currentClusterState = EClusterState.BottomBorder;
    }
    else if (EClusterState.BottomBorder == currentClusterState)
    {
        this.currentClusterDef.BottomBorderInfo = ParseBorderInfo("Bottom", currentLine, lineNumber);
        currentClusterState = EClusterState.Variable;
    }
    else if (EClusterState.Variable == currentClusterState)
    {
        // Remaining lines until END CLUSTER are variable memberships.
        Match m = TestFileStrings.ParseClusterVariable.Match(currentLine);
        if (m.Success)
        {
            int variableId = int.Parse(m.Groups["var"].ToString());
            this.currentClusterDef.AddVariableDef(this.VariableDefs[variableId]);
        }
        else
        {
            Validate.Fail(string.Format("Unparsable CLUSTER Variable line {0}: {1}", lineNumber, currentLine));
        }
    }
    return currentSection;
}
/// <summary>
/// Dispatches a line that may start a new CLUSTER or CONSTRAINTS section.
/// Returns the section to continue reading in (unchanged when nothing matched).
/// </summary>
private ESection ProcessClusterOrConstraints(string currentLine, ESection currentSection, ref EClusterState currentClusterState)
{
    if (currentLine.StartsWith(TestFileStrings.BeginCluster, StringComparison.OrdinalIgnoreCase))
    {
        // Start a fresh cluster definition; its fields arrive on subsequent lines,
        // beginning with the ID.
        this.currentClusterDef = new ClusterDef(this.MinClusterSizeX, this.MinClusterSizeY);
        currentClusterState = EClusterState.Id;
        return ESection.Cluster;
    }

    // The X/Y-specific constraint headers are tested before the generic one
    // (presumably the generic header is a prefix of both - ordering preserved
    // from the original; confirm against TestFileStrings if changing).
    if (currentLine.StartsWith(TestFileStrings.BeginConstraintsX, StringComparison.OrdinalIgnoreCase))
    {
        this.currentConstraintDefs = this.ConstraintDefsX;
        return ESection.Constraints;
    }
    if (currentLine.StartsWith(TestFileStrings.BeginConstraintsY, StringComparison.OrdinalIgnoreCase))
    {
        this.currentConstraintDefs = this.ConstraintsDefY;
        return ESection.Constraints;
    }
    if (currentLine.StartsWith(TestFileStrings.BeginConstraints, StringComparison.OrdinalIgnoreCase))
    {
        // Un-suffixed header defaults to the X constraints.
        this.currentConstraintDefs = this.ConstraintDefsX;
        return ESection.Constraints;
    }

    return currentSection;
}
private static void Setup__Test_Cluster_Pad0_Sibling9_Nest1_MinSize(int width, int height, out VariableDef[] variableDefs, out ClusterDef[] clusterDefs)
{
    // A 3x3 grid of nodes, each sized 3x3, at positions (col, row) for
    // col, row in 1..3.  Each node gets its own cluster (of the given
    // width/height) and all nine clusters share one parent cluster.
    var defs = new List<VariableDef>();
    for (int row = 1; row <= 3; ++row)
    {
        for (int col = 1; col <= 3; ++col)
        {
            defs.Add(new VariableDef(col, row, 3.0, 3.0));
        }
    }
    variableDefs = defs.ToArray();

    clusterDefs = new ClusterDef[10];
    clusterDefs[0] = new ClusterDef();      // Parent cluster
    for (int idx = 1; idx <= 9; ++idx)
    {
        // 1-based to skip the parent cluster; variables are 0-based.
        clusterDefs[idx] = new ClusterDef(width, height);
        clusterDefs[idx].AddVariableDef(variableDefs[idx - 1]);
        clusterDefs[0].AddClusterDef(clusterDefs[idx]);
    }
}
// Note: cConstraintsPerVar is an artifact of ProjectionSolver testing and is ignored
// in OverlapRemoval since we generate only the necessary constraints for removing overlaps (it
// could be extended to generate additional overlaps but that has not yet been done).
/// <summary>
/// Generates a random set of variable (node) definitions and, if MaxClusters > 0, a random
/// cluster hierarchy over them, then writes the result to a test data file.
/// </summary>
/// <param name="cVars">Number of variables to generate; must be greater than zero.</param>
/// <param name="cConstraintsPerVar">Ignored here (see note above), but must be nonzero.</param>
/// <param name="strOutFile">Name of the test file to write.</param>
/// <returns>The result of WriteTestFile for the generated definitions.</returns>
public bool CreateFile(uint cVars, uint cConstraintsPerVar, string strOutFile)
{
    Validate.IsTrue(cVars > 0, "Test file creation requires cVars > 0");
    Validate.AreNotEqual((uint)0, cConstraintsPerVar, "Test file creation requires cConstraintsPerVar > 0");

    // Generate a new set of Variable definitions.
    var lstVarDefs = new List<VariableDef>();
    Random rng = TestConstraints.NewRng();

    // Print this so that in case of errors we can re-run with this seed.
    Console.WriteLine("Creating test file with seed 0x{0}", TestConstraints.RandomSeed.ToString("X"));

    //
    // This code was adapted from satisfy_inc for ProjSolver, and then the second dimension
    // and sizes were added for OverlapRemoval, as well as other extensions.
    //
    for (int idxVar = 0; idxVar < cVars; ++idxVar)
    {
        double dblPosX = TestConstraints.RoundRand(rng, TestConstraints.MaxNodePosition);
        double dblPosY = TestConstraints.RoundRand(rng, TestConstraints.MaxNodePosition);

        // Ensure nonzero sizes.
        double dblSizeX = TestConstraints.RoundRand(rng, MaxSize) + 1.0;
        double dblSizeY = TestConstraints.RoundRand(rng, MaxSize) + 1.0;

        double dblWeightX = 1.0, dblWeightY = 1.0;
        if (TestConstraints.MaxWeightToGenerate > 0.0)
        {
            // Ensure nonzero weights.
            dblWeightX = TestConstraints.RoundRand(rng, TestConstraints.MaxWeightToGenerate) + 0.01;
            dblWeightY = TestConstraints.RoundRand(rng, TestConstraints.MaxWeightToGenerate) + 0.01;
        }
        lstVarDefs.Add(new VariableDef((uint)idxVar
                        , dblPosX, dblPosY
                        , dblSizeX, dblSizeY
                        , dblWeightX, dblWeightY));
    } // endfor idxVar

    List<ClusterDef> lstClusDefs = null;
    if (MaxClusters > 0)
    {
        // If we are generating a random number of clusters, get that number here.
        int cClusters = MaxClusters;
        if (WantRandomClusters)
        {
            cClusters = rng.Next(cClusters);
        }

        // Add the first cluster, at the root level - hence no parent and no borders.
        // No BorderInfo needed for root clusters - it's ignored.  RoundRand returns
        // 0 if its arg is 0.
        lstClusDefs = new List<ClusterDef>(cClusters)
            {
                new ClusterDef(
                        TestConstraints.RoundRand(rng, MinClusterSizeX),
                        TestConstraints.RoundRand(rng, MinClusterSizeY)) { IsNewHierarchy = true }
            };
        if (TestGlobals.VerboseLevel >= 3)
        {
            Console.WriteLine("Level-1 cluster: {0}", lstClusDefs[0].ClusterId);
        }

        // If we are doing a single hierarchy only, restrict the range to the current set of parents,
        // otherwise give ourselves about a 10% chance of being at the root level instead of being nested.
        int cRootExtra = WantSingleClusterRoot ? 0 : Math.Max(cClusters / 10, 1);

        // Create the clusters, randomly selecting a parent for each from the items previously
        // put in the list.
        for (int idxNewClus = 1; idxNewClus < cClusters; ++idxNewClus)
        {
            int idxParentClus = rng.Next(lstClusDefs.Count + cRootExtra);  // Allow out-of-bounds index as "root level" flag

            // Margin stuff stays 0 if MaxMargin is 0 (and border stuff is ignored if it's a root cluster).
            var clusNew = new ClusterDef(TestConstraints.RoundRand(rng, MinClusterSizeX),
                            TestConstraints.RoundRand(rng, MinClusterSizeY),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedLeftBorder),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedRightBorder),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedTopBorder),
                            GetBorderInfo(lstClusDefs.Count, TestConstraints.RoundRand(rng, MaxMargin), WantFixedBottomBorder));
            lstClusDefs.Add(clusNew);

            // If we are doing a single hierarchy only, restrict the range to the current set of parents.
            // Otherwise, if the parent index is >= the index we're adding now, that's our way of
            // selecting the node to be at the root level.
            if (WantSingleClusterRoot)
            {
                idxParentClus %= idxNewClus;
            }
            if (idxParentClus < idxNewClus)
            {
                ClusterDef clusParent = lstClusDefs[idxParentClus];
                clusParent.AddClusterDef(clusNew);
                if (TestGlobals.VerboseLevel >= 3)
                {
                    // Print the new cluster followed by its ancestor chain.
                    Console.Write("Nested cluster: {0}", clusNew.ClusterId);
                    for (; null != clusParent; clusParent = clusParent.ParentClusterDef)
                    {
                        Console.Write(" {0}", clusParent.ClusterId);
                    }
                    Console.WriteLine();
                }
            }
            else
            {
                // Create a simple cluster since root clusters don't honor borders.
                clusNew.IsNewHierarchy = true;
                if (TestGlobals.VerboseLevel >= 3)
                {
                    Console.WriteLine("Level-1 cluster: {0}", clusNew.ClusterId);
                }
            }
        }

        // Now run through the nodes and randomly assign them into the clusters.
        foreach (VariableDef varDef in lstVarDefs)
        {
            int idxParentClus = rng.Next(lstClusDefs.Count + cRootExtra);
            if (idxParentClus < lstClusDefs.Count)
            {
                // Don't write the ones at the root level.
                lstClusDefs[idxParentClus].AddVariableDef(varDef);
            }
            else if (TestGlobals.VerboseLevel >= 3)
            {
                Console.WriteLine("Root var: {0}", varDef.IdString);
            }
        }
    } // endif MaxClusters > 0

    return WriteTestFile(lstVarDefs, lstClusDefs, strOutFile);
}