// Removes every state in toDelete from this automaton, together with any arc
// that points at a deleted state, then compacts the remaining state ids.
// Deleting the start state is refused with a message on stderr.
public virtual void deleteStates(HashSet toDelete)
{
    if (toDelete.contains(this.start)) {
        java.lang.System.err.println("Cannot delete start state.");
        return;
    }
    ArrayList survivors = new ArrayList();
    Iterator stateIt = this.states.iterator();
    while (stateIt.hasNext()) {
        State current = (State)stateIt.next();
        if (toDelete.contains(current)) {
            continue;  // state is being removed; skip it entirely
        }
        survivors.add(current);
        // Keep only the arcs whose destination state also survives.
        ArrayList keptArcs = new ArrayList();
        for (int arcIdx = 0; arcIdx < current.getNumArcs(); arcIdx++) {
            Arc arc = current.getArc(arcIdx);
            if (!toDelete.contains(arc.getNextState())) {
                keptArcs.add(arc);
            }
        }
        current.setArcs(keptArcs);
    }
    this.states = survivors;
    this.remapStateIds();
}
// Walks the linguist's search graph from the initial state, counting each
// distinct state once and tallying it by type, then prints the totals.
public override void run()
{
    Linguist linguist = this.getLinguist();
    LinkedList frontier = new LinkedList();
    HashSet visited = new HashSet();
    int totalStates = 0;
    frontier.add(linguist.getSearchGraph().getInitialState());
    while (!frontier.isEmpty()) {
        SearchState current = (SearchState)frontier.remove(0);
        if (visited.contains(current)) {
            continue;  // already counted
        }
        totalStates++;
        this.incrementStateTypeCount(current);
        visited.add(current);
        // Successors are enqueued in reverse index order, matching the
        // original traversal order.
        SearchStateArc[] arcs = current.getSuccessors();
        for (int i = arcs.Length - 1; i >= 0; i--) {
            frontier.add(arcs[i].getState());
        }
    }
    [email protected]("# ----------- linguist stats ------------ ");
    [email protected](new StringBuilder().append("# Total states: ").append(totalStates).toString());
    this.dumpStateTypeCounts();
}
// Performs an in-order traversal of the BST rooted at `node`, looking for two
// elements that sum to k. For each node visited, first checks whether the
// complement (k - element) was seen earlier in the traversal, then records
// the node's element in `set`.
//
// BUG FIX: the original returned the left subtree's result immediately
// whenever a left child existed, so the current node and its entire right
// subtree were never examined. The traversal now falls through to check the
// current node (and then the right subtree) when the left subtree finds no pair.
bool traverse(Node <T> node, int k, HashSet <Node <T> > set)
{
    // If the node has a left child, search it first; a hit there
    // short-circuits the rest of the traversal.
    if (node.left != null && traverse(node.left, k, set)) {
        return(true);
    }
    // Check to see if the set contains the element that would sum
    // with this node's element to equal k.
    if (set.contains(k - node.element)) {
        return(true);
    }
    // Record this node's element for later complement checks.
    set.add(node.element);
    // Finally search the right subtree, if any.
    if (node.right != null) {
        return(traverse(node.right, k, set));
    }
    // Reached the end of this branch without finding a pair.
    return(false);
}
// Applies a "N items for a bundle price" promotion as many times as the
// remaining order quantity allows (recursing on the remainder), then charges
// any leftover items at the current per-unit price. Once the promotion fires,
// its skuId is removed from skuIdSet so default pricing is not applied to it.
public static void computePromotion1(PromotionType1 promotionType1, int customerOrder, bool isFirst, HashSet <char> skuIdSet)
{
    if (customerOrder >= promotionType1.getTotalItems()) {
        // Promotion fires: drop the skuId from the default-pricing set.
        char skuId = promotionType1.getSkuId();
        if (skuIdSet.contains(skuId)) {
            skuIdSet.Remove(skuId);
        }
        totalPrice += promotionType1.getPrice();
        // Recurse on whatever is left of the order.
        computePromotion1(promotionType1, customerOrder - promotionType1.getTotalItems(), false, skuIdSet);
        return;
    }
    if (!isFirst) {
        // Leftover units after at least one promotion application are
        // charged at the per-unit price for this skuId.
        totalPrice += customerOrder * currentPrice.get(promotionType1.getSkuId());
    }
}
// Yields the members of `members` that `scope` is allowed to access, skipping
// any member that an earlier member in the sequence has overridden.
private static Iterable <MemberInfo> filterMembers(Library typeSystem, TypeInfo scope, Iterable <MemberInfo> members)
{
    HashSet <MemberInfo> hidden = null;  // lazily allocated set of overridden members
    foreach (var member in members) {
        if (hidden != null && hidden.contains(member)) {
            continue;  // superseded by an override already emitted
        }
        bool accessible = scope.canAccessMember(member.DeclaringType, member.IsPublic, member.IsProtected, member.IsPrivate);
        if (!accessible) {
            continue;
        }
        if (member.isOverridingMembers(typeSystem)) {
            // Remember everything this member overrides so later occurrences
            // of those members are suppressed.
            foreach (var overridden in member.getOverridenMembers(typeSystem)) {
                if (hidden == null) {
                    hidden = new HashSet <MemberInfo>();
                }
                hidden.add(overridden);
            }
        }
        yield return(member);
    }
}
// Collects every SentenceHMMState reachable from `start` by following
// successor arcs (breadth-first) and returns them as a set.
public static Set collectStates(SentenceHMMState start)
{
    HashSet collected = new HashSet();
    LinkedList queue = new LinkedList();
    queue.add(start);
    while (!queue.isEmpty()) {
        SentenceHMMState current = (SentenceHMMState)queue.remove(0);
        collected.add(current);
        SearchStateArc[] arcs = current.getSuccessors();
        int arcCount = arcs.Length;
        for (int i = 0; i < arcCount; i++) {
            SentenceHMMState next = (SentenceHMMState)arcs[i].getState();
            // Enqueue each state at most once.
            if (!collected.contains(next) && !queue.contains(next)) {
                queue.add(next);
            }
        }
    }
    return(collected);
}
//https://unicode-table.com/
//Chinese [\u2E80-\u9fa5]
//Japanese [\u0800-\u4e00]|
//Korean [\uAC00-\uD7A3] [\u3130-\u318F]
// Returns true when `c` counts as a word character: lowercase ASCII letters,
// ASCII digits, Cyrillic, Latin-1 accented letters, Vietnamese characters,
// or the special characters '-' and '#'.
public bool isWord(char c)
{
    // English lowercase letters and digits
    if ((c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')) {
        return(true);
    }
    // Russian (Cyrillic block plus supplement)
    if (c >= 0x0400 && c <= 0x052f) {
        return(true);
    }
    // German / Latin-1 accented range
    if (c >= 0xc0 && c <= 0xff) {
        return(true);
    }
    // Vietnamese characters live in a precomputed set
    if (viet_set.contains(c)) {
        return(true);
    }
    // special characters treated as part of a word
    return(c == '-' || c == '#');
}
// Trims `fst`: runs a depth-first search to classify states, then deletes
// every state that ended up in neither of the two surviving sets.
public static void apply(Fst fst)
{
    if (fst.getSemiring() == null) {
        [email protected]("Fst has no semiring.");
        return;
    }
    HashSet visited = new HashSet();
    HashSet coAccessible = new HashSet();
    ArrayList[] perStatePaths = new ArrayList[fst.getNumStates()];
    ArrayList paths = new ArrayList();
    paths.add(new ArrayList());
    Connect.depthFirstSearch(fst, visited, paths, perStatePaths, coAccessible);
    // Any state in neither set is dead and can be dropped.
    HashSet toDelete = new HashSet();
    for (int i = 0; i < fst.getNumStates(); i++) {
        State state = fst.getState(i);
        if (!visited.contains(state) && !coAccessible.contains(state)) {
            toDelete.add(state);
        }
    }
    fst.deleteStates(toDelete);
}
// Marks as co-accessible every state that precedes `state` on one of the
// recorded DFS paths, provided `state` is final (non-zero final weight) or
// already marked. Newly marked states are then processed recursively.
private static void calcCoAccessible(Fst fst, State state, ArrayList arrayList, HashSet hashSet)
{
    ArrayList newlyMarked = new ArrayList();
    Iterator pathIt = arrayList.iterator();
    while (pathIt.hasNext()) {
        ArrayList path = (ArrayList)pathIt.next();
        int pos = path.lastIndexOf(state);
        if (pos != -1 && (state.getFinalWeight() != fst.getSemiring().zero() || hashSet.contains(state))) {
            // Every state on this path up to `state` can reach it,
            // hence is co-accessible as well.
            for (int i = pos; i > -1; i--) {
                if (!hashSet.contains(path.get(i))) {
                    newlyMarked.add(path.get(i));
                    hashSet.add(path.get(i));
                }
            }
        }
    }
    // Propagate from each freshly marked state.
    Iterator markedIt = newlyMarked.iterator();
    while (markedIt.hasNext()) {
        Connect.calcCoAccessible(fst, (State)markedIt.next(), arrayList, hashSet);
    }
}
// Binds this property sheet to `@class`: parses the class's property fields via
// PropertySheet.parseClass and registers each property name with an
// S4PropWrapper around its annotation. Duplicate property names trigger an
// AssertionError (when assertions are enabled); reflective access failures are
// logged and skipped.
// NOTE(review): this.ownerClass is assigned before the "already instantiated"
// check throws, so the field is mutated even on failure — confirm intended.
// Decompiled (Java->C#) code left byte-identical.
internal virtual void setConfigurableClass(Class @class) { this.ownerClass = @class; if (this.isInstanciated()) { string text = "class is already instantiated"; throw new RuntimeException(text); } HashSet hashSet = new HashSet(); Map map = PropertySheet.parseClass(this.ownerClass); Iterator iterator = map.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = (Map.Entry)iterator.next(); try { string text2 = (string)((Field)entry.getKey()).get(null, PropertySheet.__GetCallerID()); if (!PropertySheet.assertionsDisabled && hashSet.contains(text2)) { object obj = new StringBuilder().append("duplicate property-name for different properties: ").append(text2).append(" for the class ").append(@class).toString(); throw new AssertionError(obj); } this.registerProperty(text2, new S4PropWrapper((java.lang.annotation.Annotation)entry.getValue())); hashSet.add(text2); } catch (IllegalAccessException ex) { Throwable.instancehelper_printStackTrace(ex); } continue; } }
// Dumps the search graph rooted at `searchState` to `@out`, visiting each
// state (keyed by its signature) exactly once. Exploration order is
// depth-first or breadth-first depending on this.depthFirst.
private void dumpSearchGraph(PrintStream @out, SearchState searchState)
{
    LinkedList worklist = new LinkedList();
    HashSet dumpedSignatures = new HashSet();
    this.startDump(@out);
    worklist.add(new StateLevel(searchState, 0));
    while (!worklist.isEmpty()) {
        StateLevel entry = (StateLevel)worklist.remove(0);
        int level = entry.getLevel();
        SearchState state = entry.getState();
        if (dumpedSignatures.contains(state.getSignature())) {
            continue;  // already dumped this state
        }
        dumpedSignatures.add(state.getSignature());
        this.startDumpNode(@out, state, level);
        // Arcs are walked in reverse index order, as in the original dump.
        SearchStateArc[] arcs = state.getSuccessors();
        for (int i = arcs.Length - 1; i >= 0; i--) {
            SearchState next = arcs[i].getState();
            this.dumpArc(@out, state, arcs[i], level);
            if (this.depthFirst) {
                // Push to the front: children before siblings.
                worklist.add(0, new StateLevel(next, level + 1));
            } else {
                worklist.add(new StateLevel(next, level + 1));
            }
        }
        this.endDumpNode(@out, state, level);
    }
    this.endDump(@out);
}
// Follows the chain of statements linked through StatementNode user data,
// handling each unvisited node in turn. Returns FALSE if `s` was already
// visited or a handler rejects; TRUE once the chain ends (or loops back).
Boolean visitStatement(StatementNode s, HashSet <StatementNode> visited)
{
    if (visited.contains(s)) {
        return(Boolean.FALSE);
    }
    while (true) {
        visited.add(s);
        if (!handleStatement(s, visited)) {
            return(Boolean.FALSE);
        }
        s = s.getUserData(typeof(StatementNode));
        if (s == null || visited.contains(s)) {
            break;  // end of chain, or a cycle was detected
        }
    }
    return(Boolean.TRUE);
}
/**
 * Stores the specified domain for the specified variable if a domain has
 * not yet been stored for the variable.
 */
public void storeDomainFor(Variable var, Domain domain)
{
    if (affectedVariables.contains(var)) {
        return;  // a domain is already saved for this variable
    }
    savedDomains.Add(new Pair <Variable, Domain>(var, domain));
    affectedVariables.Add(var);
}
// A two-word fuzzy query must produce fuzzy variants for both words and rank
// the most similar document ("2") first.
public void TestMultiWord()
{
    FuzzyLikeThisQuery flt = new FuzzyLikeThisQuery(10, analyzer);
    flt.AddTerms("jonathin smoth", "name", 0.3f, 1);
    Query rewritten = flt.Rewrite(searcher.IndexReader);
    HashSet <Term> terms = new HashSet <Term>();
    rewritten.ExtractTerms(terms);
    assertTrue("Should have variant jonathan", terms.contains(new Term("name", "jonathan")));
    assertTrue("Should have variant smith", terms.contains(new Term("name", "smith")));
    TopDocs topDocs = searcher.Search(flt, 1);
    ScoreDoc[] scoreDocs = topDocs.ScoreDocs;
    assertTrue("score docs must match 1 doc", (scoreDocs != null) && (scoreDocs.Length > 0));
    Document doc = searcher.Doc(scoreDocs[0].Doc);
    assertEquals("Should match most similar when using 2 words", "2", doc.Get("id"));
}
// The query should expand to the smythe/smith/smyth variants, and the
// closest-edit-distance match — not the rarest variant — must score first.
public void TestClosestEditDistanceMatchComesFirst()
{
    FuzzyLikeThisQuery flt = new FuzzyLikeThisQuery(10, analyzer);
    flt.AddTerms("smith", "name", 0.3f, 1);
    Query rewritten = flt.Rewrite(searcher.IndexReader);
    HashSet <Term> terms = new HashSet <Term>();
    rewritten.ExtractTerms(terms);
    assertTrue("Should have variant smythe", terms.contains(new Term("name", "smythe")));
    assertTrue("Should have variant smith", terms.contains(new Term("name", "smith")));
    assertTrue("Should have variant smyth", terms.contains(new Term("name", "smyth")));
    TopDocs topDocs = searcher.Search(flt, 1);
    ScoreDoc[] scoreDocs = topDocs.ScoreDocs;
    assertTrue("score docs must match 1 doc", (scoreDocs != null) && (scoreDocs.Length > 0));
    Document doc = searcher.Doc(scoreDocs[0].Doc);
    assertEquals("Should match most similar not most rare variant", "2", doc.Get("id"));
}
// Recalculates all formula ranges in dependency order: sorts the expression
// graph, evaluates each evaluatable expression (loading assembly/BOQ-item
// local variables first for assignment ranges), notifies the provider of each
// changed value, and finally parses-and-publishes the quantity-per-unit
// formula for any assignment that had no range in the graph.
// NOTE(review): `hashSet` is declared as IDictionary<...>.KeyCollection but
// initialized as HashSet<object> — converter artifact; confirm the intended
// collection type before touching this code.
// Decompiled (Java->C#) code left byte-identical.
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in C#:
//ORIGINAL LINE: public void calculate(boolean paramBoolean) throws org.boris.expr.ExprException
public override void calculate(bool paramBoolean) { if (this.autoCalculate && !paramBoolean) { return; } this.graph.sort(); IDictionary <string, ResourceToAssignmentTable> .KeyCollection hashSet = new HashSet <object>(this.assignmentsWithFormulasMap.Keys); foreach (Range range in this.graph) { string str = range.Dimension1Name; bool @bool = hashSet.contains(str); if (@bool) { hashSet.remove(str); } Expr expr = (Expr)this.inputs[range]; if (expr is ExprEvaluatable) { if (@bool) { CurrentField = "QTYPUNITFORM"; ResourceToAssignmentTable resourceToAssignmentTable = (ResourceToAssignmentTable)this.assignmentsWithFormulasMap[str]; if (resourceToAssignmentTable.getResourceTable() is AssemblyTable) { AssemblyTable assemblyTable = (AssemblyTable)resourceToAssignmentTable.getResourceTable(); loadLocalVariables(assemblyTable.Vars); } else if (resourceToAssignmentTable.getResourceTable() is BoqItemTable) { BoqItemTable boqItemTable = (BoqItemTable)resourceToAssignmentTable.getResourceTable(); loadLocalVariables(boqItemTable.Vars); } } Expr expr1 = ((ExprEvaluatable)expr).evaluate(this); this.provider.valueChanged(range, expr1); if (@bool) { ((ResourceToAssignmentTable)this.assignmentsWithFormulasMap[str]).PvVars = (string.ReferenceEquals(PvVars, null)) ? "" : PvVars; } this.values[range] = expr1; } } foreach (string str in hashSet) { CurrentField = "QTYPUNITFORM"; ResourceToAssignmentTable resourceToAssignmentTable = (ResourceToAssignmentTable)this.assignmentsWithFormulasMap[str]; Expr expr = parseStatement(resourceToAssignmentTable.QuantityPerUnitFormula); Range range = new Range(str); this.provider.valueChanged(range, expr); this.values[range] = expr; resourceToAssignmentTable.PvVars = (string.ReferenceEquals(PvVars, null)) ? "" : PvVars; } }
// With the default DuplicateFilter every url returned by the search must be
// unique.
public void TestDefaultFilter()
{
    DuplicateFilter df = new DuplicateFilter(KEY_FIELD);
    HashSet <string> seenUrls = new HashSet <string>();
    ScoreDoc[] hits = searcher.Search(tq, df, 1000).ScoreDocs;
    foreach (ScoreDoc hit in hits) {
        Document d = searcher.Doc(hit.Doc);
        string url = d.Get(KEY_FIELD);
        assertFalse("No duplicate urls should be returned", seenUrls.contains(url));
        seenUrls.add(url);
    }
}
// Computes, for delegate grammar `p`, which locally defined rules are needed
// to satisfy rule references from the delegator (ruleDefs/ruleRefs accumulate
// across the composite grammar tree), then recurses into every child delegate.
public virtual void _minimizeRuleSet(HashSet <string> ruleDefs, HashSet <string> ruleRefs, CompositeGrammarTree p)
{
    var localRuleDefs = new HashSet <string>();
    // Record the rules first defined by this grammar.
    foreach (Rule r in p.grammar.Rules) {
        if (ruleDefs.contains(r.name)) {
            continue;
        }
        localRuleDefs.add(r.name);
        ruleDefs.add(r.name);
    }
    System.Console.Out.WriteLine("rule defs for " + p.grammar.name + ": " + localRuleDefs);
    // remove locally-defined rules not in ref set
    // find intersection of local rules and references from delegator
    // that is set of rules needed by delegator
    HashSet <string> neededLocalDefs = new HashSet <string>();
    foreach (string refName in ruleRefs) {
        if (localRuleDefs.contains(refName)) {
            neededLocalDefs.add(refName);
        }
    }
    // now get list of refs from the needed local defs.
    // Those rules are also allowed in this delegate
    foreach (GrammarAST refAST in p.grammar.ruleRefs) {
        if (neededLocalDefs.contains(refAST.enclosingRuleName)) {
            // found rule ref within needed rule
        }
    }
    // remove rule refs not in the new rule def set
    // walk all children, adding rules not already defined
    if (p.children != null) {
        foreach (CompositeGrammarTree @delegate in p.children) {
            _minimizeRuleSet(ruleDefs, ruleRefs, @delegate);
        }
    }
}
// Lowercases `str` (with a trailing space appended) and blanks out every
// character found in `set`; double quotes are always left untouched.
public char[] clear(String str)
{
    char[] chars = (str + " ").toLowerCase().toCharArray();
    for (int i = 0; i < chars.Length; i++) {
        char ch = chars[i];
        if (ch != '"' && set.contains(ch)) {
            chars[i] = ' ';
        }
    }
    return(chars);
}
// Runs the spatial query and checks the hit count; when assertIds is given,
// also verifies that every expected document id appears in the results.
private void checkHits(SpatialArgs args, int assertNumFound, int[] assertIds)
{
    SearchResults got = executeQuery(strategy.MakeQuery(args), 100);
    assertEquals("" + args, assertNumFound, got.numFound);
    if (assertIds == null) {
        return;
    }
    ISet <int?> foundIds = new HashSet <int?>();
    foreach (SearchResult result in got.results) {
        foundIds.add(int.Parse(result.document.Get("id"), CultureInfo.InvariantCulture));
    }
    foreach (int assertId in assertIds) {
        assertTrue("has " + assertId, foundIds.contains(assertId));
    }
}
// Fast-invalidation mode must also return only unique urls — exactly two.
public void TestFastFilter()
{
    DuplicateFilter df = new DuplicateFilter(KEY_FIELD);
    df.ProcessingMode = (ProcessingMode.PM_FAST_INVALIDATION);
    HashSet <string> uniqueUrls = new HashSet <string>();
    ScoreDoc[] hits = searcher.Search(tq, df, 1000).ScoreDocs;
    assertTrue("Filtered searching should have found some matches", hits.Length > 0);
    foreach (ScoreDoc hit in hits) {
        Document d = searcher.Doc(hit.Doc);
        string url = d.Get(KEY_FIELD);
        assertFalse("No duplicate urls should be returned", uniqueUrls.contains(url));
        uniqueUrls.add(url);
    }
    assertEquals("Two urls found", 2, uniqueUrls.size());
}
/**
 * Locate the triangle with point (a Pnt) inside (or on) it.
 * Attempts a directed walk starting from the most recently used triangle,
 * falling back to a brute-force scan over all triangles.
 * @param point the Pnt to locate
 * @return triangle (Simplex<Pnt>) that holds the point; null if no such triangle
 */
public Simplex locate(Pnt point)
{
    Simplex triangle = mostRecent;
    if (!this.contains(triangle)) {
        triangle = null;
    }
    // Try a directed walk (this works fine in 2D, but can fail in 3D)
    Set visited = new HashSet();
    while (triangle != null) {
        if (visited.contains(triangle)) {
            // This should never happen
            Console.WriteLine("Warning: Caught in a locate loop");
            break;
        }
        visited.add(triangle);
        // Corner opposite point
        Pnt corner = point.isOutside((Pnt[])triangle.toArray(new Pnt[0]));
        if (corner == null) {
            return(triangle);  // point is inside (or on) this triangle
        }
        triangle = this.neighborOpposite(corner, triangle);
    }
    // No luck; try brute force
    Console.WriteLine("Warning: Checking all triangles for " + point);
    for (Iterator it = this.iterator(); it.hasNext();) {
        Simplex candidate = (Simplex)it.next();
        if (point.isOutside((Pnt[])candidate.toArray(new Pnt[0])) == null) {
            return(candidate);
        }
    }
    // No such triangle
    Console.WriteLine("Warning: No triangle holds " + point);
    return(null);
}
// Without any duplicate filter, the search must surface duplicate urls.
public void TestNoFilter()
{
    HashSet <string> seen = new HashSet <string>();
    ScoreDoc[] hits = searcher.Search(tq, null, 1000).ScoreDocs;
    assertTrue("Default searching should have found some matches", hits.Length > 0);
    bool dupsFound = false;
    foreach (ScoreDoc hit in hits) {
        Document d = searcher.Doc(hit.Doc);
        string url = d.Get(KEY_FIELD);
        if (!dupsFound) {
            dupsFound = seen.contains(url);
        }
        seen.add(url);
    }
    assertTrue("Default searching should have found duplicate urls", dupsFound);
}
// Builds a NormalizeCharMap with up to four random key->value mappings,
// skipping empty or duplicate keys.
public object Create(Random random)
{
    NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
    // we can't add duplicate keys, or NormalizeCharMap gets angry
    ISet <string> usedKeys = new HashSet <string>();
    int mappingCount = random.nextInt(5);
    for (int i = 0; i < mappingCount; i++) {
        string key = TestUtil.RandomSimpleString(random);
        if (usedKeys.contains(key) || key.Length == 0) {
            continue;  // duplicate or empty key: skip this attempt
        }
        string value = TestUtil.RandomSimpleString(random);
        builder.Add(key, value);
        usedKeys.add(key);
    }
    return(builder.Build());
}
/**
 * Report the boundary of a Set of Simplices.
 * The boundary is a Set of facets where each facet is a Set of vertices.
 * A facet shared by two simplices is interior and cancels out; a facet seen
 * exactly once remains on the boundary.
 * @return a Set of the facets that make up the boundary
 */
public static Set boundary(Set simplexSet)
{
    Set theBoundary = new HashSet();
    for (Iterator it = simplexSet.iterator(); it.hasNext();) {
        Simplex simplex = (Simplex)it.next();
        for (Iterator facetIt = simplex.facets().iterator(); facetIt.hasNext();) {
            Set facet = (Set)facetIt.next();
            // Toggle membership: a second sighting removes the facet.
            if (theBoundary.contains(facet)) {
                theBoundary.remove(facet);
            } else {
                theBoundary.add(facet);
            }
        }
    }
    return(theBoundary);
}
// Builds a circle of radius distKM around `pt` (or its bounding box when
// `bbox` is set), runs an Intersects search — randomly via the query or the
// filter code path — and checks the hit count plus, when given, the expected
// document ids.
private void _CheckHits(bool bbox, IPoint pt, double distKM, int assertNumFound, params int[] assertIds)
{
    SpatialOperation op = SpatialOperation.Intersects;
    double distDEG = DistanceUtils.Dist2Degrees(distKM, DistanceUtils.EARTH_MEAN_RADIUS_KM);
    IShape shape = ctx.MakeCircle(pt, distDEG);
    if (bbox) {
        shape = shape.BoundingBox;
    }
    SpatialArgs args = new SpatialArgs(op, shape);
    //args.setDistPrecision(0.025);
    // Exercise both the query and the filter code paths at random.
    Query query;
    if (Random().nextBoolean()) {
        query = strategy.MakeQuery(args);
    } else {
        query = new FilteredQuery(new MatchAllDocsQuery(), strategy.MakeFilter(args));
    }
    SearchResults results = executeQuery(query, 100);
    assertEquals("" + shape, assertNumFound, results.numFound);
    if (assertIds == null) {
        return;
    }
    ISet <int?> resultIds = new HashSet <int?>();
    foreach (SearchResult result in results.results) {
        resultIds.add(int.Parse(result.document.Get("id"), CultureInfo.InvariantCulture));
    }
    foreach (int assertId in assertIds) {
        assertTrue("has " + assertId, resultIds.contains(assertId));
    }
}
// Writes up to `count` random sentences to the file at `path`, skipping
// duplicates (duplicate draws reduce the number of lines written rather than
// being re-drawn). I/O failures are logged, not rethrown.
public virtual void dumpRandomSentences(string path, int count)
{
    try {
        HashSet seen = new HashSet();
        PrintWriter writer = new PrintWriter(new FileOutputStream(path));
        for (int i = 0; i < count; i++) {
            string sentence = this.getRandomSentence();
            if (!seen.contains(sentence)) {
                seen.add(sentence);
                writer.println(sentence);
            }
        }
        writer.close();
    } catch (IOException ex) {
        this.logger.severe(new StringBuilder().append("Can't write random sentences to ").append(path).append(' ').append(ex).toString());
    }
}
// Breadth-first search from `searchStateArc` that collects into `list` the
// first arcs reached whose state is not an HMM state; arcs leading to HMM
// states are expanded through their successors instead.
private void findNextNonHMMArc(SearchStateArc searchStateArc, List list)
{
    HashSet visited = new HashSet();
    ArrayList frontier = new ArrayList();
    frontier.add(searchStateArc);
    while (!frontier.isEmpty()) {
        SearchStateArc arc = (SearchStateArc)frontier.remove(0);
        if (visited.contains(arc)) {
            continue;  // already expanded
        }
        visited.add(arc);
        if (arc.getState() is HMMSearchState) {
            // Still inside the HMM: keep expanding successors.
            frontier.addAll(Arrays.asList(arc.getState().getSuccessors()));
        } else {
            list.add(arc);
        }
    }
}
// Runs a depth-first search over `fst` from its start state, repeating calls
// to depthFirstSearchNext until the search converges (the returned state's id
// equals the start state's id), then seeds co-accessibility (into hashSet2)
// from every state with a non-zero final weight.
// NOTE(review): decompiled do/while left untouched — if the start state is
// already in `hashSet` on entry, the loop exits immediately because `state`
// is initialized to `start`; confirm that is the intended convergence test.
private static void depthFirstSearch(Fst fst, HashSet hashSet, ArrayList arrayList, ArrayList[] array, HashSet hashSet2) { State start = fst.getStart(); State state = start; do { if (!hashSet.contains(start)) { state = Connect.depthFirstSearchNext(fst, start, arrayList, array, hashSet); } }while (start.getId() != state.getId()); int numStates = fst.getNumStates(); for (int i = 0; i < numStates; i++) { State state2 = fst.getState(i); if (state2.getFinalWeight() != fst.getSemiring().zero()) { Connect.calcCoAccessible(fst, state2, arrayList, hashSet2); } } }
// Draws `count` random sentences, de-duplicates them, and prints the unique
// sentences to System.out in sorted order.
public virtual void dumpRandomSentences(int count)
{
    HashSet unique = new HashSet();
    for (int i = 0; i < count; i++) {
        string sentence = this.getRandomSentence();
        if (!unique.contains(sentence)) {
            unique.add(sentence);
        }
    }
    ArrayList sorted = new ArrayList(unique);
    Collections.sort(sorted);
    Iterator it = sorted.iterator();
    while (it.hasNext()) {
        [email protected]((string)it.next());
    }
}
// Runs a single spatial test query and checks its results against the
// expectations in `q`: when order matters, ids must match exactly and in
// sequence; otherwise the result set is checked either as a superset of the
// expected ids or for exact (order-insensitive) equality.
public virtual void runTestQuery(SpatialMatchConcern concern, SpatialTestQuery q)
{
    String msg = q.toString(); //"Query: " + q.args.toString(ctx);
    SearchResults got = executeQuery(makeQuery(q), Math.Max(100, q.ids.size() + 1));
    if (storeShape && got.numFound > 0) {
        //check stored value is there
        assertNotNull(got.results[0].document.Get(strategy.FieldName));
    }
    if (concern.orderIsImportant) {
        // Expected ids must appear in exactly this order.
        IEnumerator<String> ids = q.ids.GetEnumerator();
        foreach (SearchResult r in got.results) {
            String id = r.document.Get("id");
            if (!ids.MoveNext()) {
                fail(msg + " :: Did not get enough results. Expect" + q.ids + ", got: " + got.toDebugString());
            }
            assertEquals("out of order: " + msg, ids.Current, id);
        }
        if (ids.MoveNext()) {
            fail(msg + " :: expect more results then we got: " + ids.Current);
        }
    } else {
        // We are looking at how the results overlap
        if (concern.resultsAreSuperset) {
            ISet<string> found = new HashSet<string>();
            foreach (SearchResult r in got.results) {
                found.add(r.document.Get("id"));
            }
            foreach (String s in q.ids) {
                if (!found.contains(s)) {
                    // Fixed typo in the failure message ("mising" -> "missing").
                    fail("Results are missing id: " + s + " :: " + found);
                }
            }
        } else {
            List<string> found = new List<string>();
            foreach (SearchResult r in got.results) {
                found.Add(r.document.Get("id"));
            }
            // sort both so that the order is not important
            CollectionUtil.TimSort(q.ids);
            CollectionUtil.TimSort(found);
            assertEquals(msg, q.ids.toString(), found.toString());
        }
    }
}
// Executes the spatial query, asserts the expected number of hits, and — if
// an id list is supplied — asserts every expected id was found.
private void checkHits(SpatialArgs args, int assertNumFound, int[] assertIds)
{
    SearchResults got = executeQuery(strategy.MakeQuery(args), 100);
    assertEquals("" + args, assertNumFound, got.numFound);
    if (assertIds != null) {
        // Gather the returned document ids once, then check each expectation.
        ISet<int?> returnedIds = new HashSet<int?>();
        foreach (SearchResult hit in got.results) {
            returnedIds.add(int.Parse(hit.document.Get("id"), CultureInfo.InvariantCulture));
        }
        foreach (int expectedId in assertIds) {
            assertTrue("has " + expectedId, returnedIds.contains(expectedId));
        }
    }
}
// Determines which of delegate grammar p's locally defined rules are required
// by rule references from the delegator, accumulating definitions and
// references in ruleDefs/ruleRefs, then applies the same minimization to
// every child delegate.
public virtual void _minimizeRuleSet( HashSet<string> ruleDefs, HashSet<string> ruleRefs, CompositeGrammarTree p )
{
    var localRuleDefs = new HashSet<string>();
    foreach ( Rule rule in p.grammar.Rules )
    {
        if ( !ruleDefs.contains( rule.name ) )
        {
            localRuleDefs.add( rule.name );
            ruleDefs.add( rule.name );
        }
    }
    System.Console.Out.WriteLine( "rule defs for " + p.grammar.name + ": " + localRuleDefs );

    // remove locally-defined rules not in ref set
    // find intersection of local rules and references from delegator
    // that is set of rules needed by delegator
    HashSet<string> localRuleDefsSatisfyingRefsFromBelow = new HashSet<string>();
    foreach ( string refName in ruleRefs )
    {
        if ( !localRuleDefs.contains( refName ) )
        {
            continue;
        }
        localRuleDefsSatisfyingRefsFromBelow.add( refName );
    }

    // now get list of refs from localRuleDefsSatisfyingRefsFromBelow.
    // Those rules are also allowed in this delegate
    foreach ( GrammarAST refAST in p.grammar.ruleRefs )
    {
        if ( localRuleDefsSatisfyingRefsFromBelow.contains( refAST.enclosingRuleName ) )
        {
            // found rule ref within needed rule
        }
    }

    // remove rule refs not in the new rule def set
    // walk all children, adding rules not already defined
    if ( p.children != null )
    {
        foreach ( CompositeGrammarTree @delegate in p.children )
        {
            _minimizeRuleSet( ruleDefs, ruleRefs, @delegate );
        }
    }
}