public void OrderedHashSet_Test()
{
    // Exercises Add/Contains/Remove plus ascending and descending enumeration,
    // first with sequential keys and then with a shuffled insertion order.
    const int nodeCount = 1000;
    var hashSet = new OrderedHashSet<int>();

    // Sequential insert: every added key must immediately report as present.
    for (var i = 0; i <= nodeCount; i++)
    {
        hashSet.Add(i);
        Assert.AreEqual(true, hashSet.Contains(i));
    }

    // IEnumerable test using linq: both directions must agree with Count.
    Assert.AreEqual(hashSet.Count, hashSet.Count());
    Assert.AreEqual(hashSet.Count, hashSet.AsEnumerableDesc().Count());

    // Sequential delete: every removed key must immediately report as absent.
    for (var i = 0; i <= nodeCount; i++)
    {
        hashSet.Remove(i);
        Assert.AreEqual(false, hashSet.Contains(i));
    }

    // IEnumerable test using linq
    Assert.AreEqual(hashSet.Count, hashSet.Count());
    Assert.AreEqual(hashSet.Count, hashSet.AsEnumerableDesc().Count());

    // Repeat the cycle with keys inserted in random order.
    var rnd = new Random();
    var testSeries = Enumerable.Range(1, nodeCount).OrderBy(x => rnd.Next()).ToList();

    foreach (var item in testSeries)
    {
        hashSet.Add(item);
        Assert.AreEqual(true, hashSet.Contains(item));
    }

    // IEnumerable test using linq
    Assert.AreEqual(hashSet.Count, hashSet.Count());
    Assert.AreEqual(hashSet.Count, hashSet.AsEnumerableDesc().Count());

    for (var i = 1; i <= nodeCount; i++)
    {
        hashSet.Remove(i);
        Assert.AreEqual(false, hashSet.Contains(i));
    }

    // IEnumerable test using linq
    Assert.AreEqual(hashSet.Count, hashSet.Count());
    Assert.AreEqual(hashSet.Count, hashSet.AsEnumerableDesc().Count());
}
// Resolves a relative ("local") import path to a concrete file on disk,
// registering dependency edges on parentInfo along the way.
// Returns the resolved full path (preferring a build-cache .d.ts owner when
// the file is not queued for compilation), or null when nothing matches.
public string resolveLocalImport(string name, TSFileAdditionalInfo parentInfo)
{
    var dirPath = PathUtils.Parent(name);
    var fileOnly = name.Substring(dirPath.Length + 1);
    var dc = _owner.DiskCache.TryGetItem(dirPath) as IDirectoryCache;
    if (dc == null || dc.IsInvalid)
    {
        // Containing directory is missing or stale - import cannot resolve.
        return (null);
    }
    // Probe candidate extensions in priority order; first valid hit wins.
    var item = ExtensionsToImport.Select(ext => dc.TryGetChild(fileOnly + ext) as IFileCache).FirstOrDefault(i => i != null && !i.IsInvalid);
    if (item == null)
    {
        return (null);
    }
    // Directory lookup may be case-insensitive; warn when on-disk casing
    // differs from the import as written.
    if (item.FullPath.Substring(0, name.Length) != name)
    {
        parentInfo.ReportDiag(false, 1, "Local import has wrong casing '" + name + "' on disk '" + item.FullPath + "'", 0, 0, 0, 0);
    }
    var itemInfo = TSFileAdditionalInfo.Get(item, _owner.DiskCache);
    parentInfo.ImportingLocal(itemInfo);
    if (IsDts(item.FullPath))
    {
        // A .d.ts was imported directly; pull in a same-named sibling .js
        // implementation (if any) so it becomes part of the build.
        var jsItem = dc.TryGetChild(fileOnly + ".js") as IFileCache;
        if (jsItem != null)
        {
            var jsItemInfo = TSFileAdditionalInfo.Get(jsItem, _owner.DiskCache);
            jsItemInfo.Type = FileCompilationType.JavaScript;
            parentInfo.ImportingLocal(jsItemInfo);
            CheckAdd(jsItem.FullPath);
        }
        // implementation for .d.ts file does not have same name, it needs to be added to build by b.asset("lib.js") and cannot have dependencies
    }
    else
    {
        itemInfo.Type = FileCompilationType.TypeScript;
        AddSource(itemInfo);
    }
    CheckAdd(item.FullPath);
    TryToResolveFromBuildCache(itemInfo);
    Crawl();
    // Point callers at the cached .d.ts unless this file must be compiled.
    if (itemInfo.DtsLink != null && !ToCompile.Contains(item.FullPath))
    {
        return (itemInfo.DtsLink.Owner.FullPath);
    }
    return (item.FullPath);
}
// Queues a path for later checking exactly once.
// Returns true when the path was newly queued, false when already present.
public bool CheckAdd(string fullNameWithExtension)
{
    var isNew = !ToCheck.Contains(fullNameWithExtension);
    if (isNew)
    {
        ToCheck.Add(fullNameWithExtension);
    }
    return isNew;
}
// Adds this grantee and, transitively, every role it holds into "set".
// The membership check stops the recursion on cyclic role grants.
private void AddGranteeAndRoles(OrderedHashSet<Grantee> set)
{
    set.Add(this);
    for (int idx = 0; idx < this.Roles.Size(); idx++)
    {
        Grantee role = this.Roles.Get(idx);
        if (set.Contains(role))
        {
            continue; // already visited - avoid infinite recursion
        }
        role.AddGranteeAndRoles(set);
    }
}
// True when every non-system reference of "obj" is already in "resolved".
private static bool IsChildObjectResolved(ISchemaObject obj, OrderedHashSet<object> resolved)
{
    OrderedHashSet<QNameManager.QName> references = obj.GetReferences();
    for (int idx = 0; idx < references.Size(); idx++)
    {
        QNameManager.QName reference = references.Get(idx);
        // System-schema names never block resolution.
        bool satisfied = SqlInvariants.IsSystemSchemaName(reference) || resolved.Contains(reference);
        if (!satisfied)
        {
            return false;
        }
    }
    return true;
}
public void Contains_ReturnsFalse()
{
    // A set seeded with one element must not report membership of another.
    Random random = new Random(1);
    var set = new OrderedHashSet<string>(random);
    string present = "hello";
    string absent = "world";

    set.Add(present);

    Assert.IsFalse(set.Contains(absent));
}
// True when every column flagged in columnCheckList is present (by name)
// in columnSet; a null columnSet satisfies nothing.
public static bool ContainsAllColumns(OrderedHashSet<string> columnSet, Table table, bool[] columnCheckList)
{
    for (int col = 0; col < columnCheckList.Length; col++)
    {
        if (!columnCheckList[col])
        {
            continue; // column not required
        }
        if (columnSet == null || !columnSet.Contains(table.GetColumn(col).GetName().Name))
        {
            return false;
        }
    }
    return true;
}
/** Add an NFA configuration to this DFA node. Add uniquely
 * an NFA state/alt/syntactic&semantic context (chain of invoking state(s)
 * and semantic predicate contexts).
 *
 * I don't see how there could be two configurations with same
 * state|alt|synCtx and different semantic contexts because the
 * semantic contexts are computed along the path to a particular state
 * so those two configurations would have to have the same predicate.
 * Nonetheless, the addition of configurations is unique on all
 * configuration info. I guess I'm saying that syntactic context
 * implies semantic context as the latter is computed according to the
 * former.
 *
 * As we add configurations to this DFA state, track the set of all possible
 * transition labels so we can simply walk it later rather than doing a
 * loop over all possible labels in the NFA.
 */
public virtual void AddNFAConfiguration(NFAState state, NFAConfiguration c)
{
    // Uniqueness gate: an identical configuration is recorded only once.
    if (_nfaConfigurations.Contains(c))
    {
        return;
    }
    _nfaConfigurations.Add(c);
    // track min alt rather than compute later
    if (c.Alt < _minAltInConfigurations)
    {
        _minAltInConfigurations = c.Alt;
    }
    if (c.SemanticContext != SemanticContext.EmptySemanticContext)
    {
        _atLeastOneConfigurationHasAPredicate = true;
    }
    // update hashCode; for some reason using context.hashCode() also
    // makes the GC take like 70% of the CPU and is slow!
    _cachedHashCode += c.State + c.Alt;
    // update reachableLabels
    // We're adding an NFA state; check to see if it has a non-epsilon edge
    if (state.transition[0] != null)
    {
        Label label = state.transition[0].Label;
        if (!(label.IsEpsilon || label.IsSemanticPredicate))
        {
            // this NFA state has a non-epsilon edge, track for fast
            // walking later when we do reach on this DFA state we're
            // building.
            _configurationsWithLabeledEdges.Add(c);
            if (state.transition[1] == null)
            {
                // later we can check this to ignore o-A->o states in closure
                c.SingleAtomTransitionEmanating = true;
            }
            AddReachableLabel(label);
        }
    }
}
// Add snack for each new live chat.
// Walks the incoming items from newest to oldest, spawning a snack for each
// chat id not seen before, keeping the seen-set bounded, and updating the
// on-screen counter.
void addSnacksByLivechat(LiveChatItem[] items)
{
    // Cached outside the loop: GameObject.Find is a full scene scan and was
    // previously executed once per NEW chat; resolving it lazily at most once
    // per call is strictly cheaper with identical visible results.
    Text likesText = null;

    // Iterate from backwards to find new chats
    for (int i = items.Length; i-- > 0;)
    {
        if (seenChats.Contains(items[i].Id))
        {
            continue;
        }

        SpawnSnack();
        seenChats.Add(items[i].Id);
        if (seenChats.Count > MAX_CHAT_COUNT)
        {
            // NOTE(review): this evicts the entry at the END of the set, which
            // is the id just added if Add appends - confirm the intended
            // eviction order (oldest vs newest).
            seenChats.RemoveAt(seenChats.Count - 1);
        }

        seenChatCount += 1;
        if (likesText == null)
        {
            likesText = GameObject.Find("LikesText").GetComponent<Text>();
        }
        likesText.text = "" + seenChatCount;
    }
}
// Applies constraint changes to table t: constraints named in dropConstraints
// are removed; for surviving FK-style constraints the shared Core object is
// re-linked into the corresponding main/ref table's own constraint copy.
public void UpdateConstraints(Table t, OrderedHashSet<QNameManager.QName> dropConstraints)
{
    // Walk backwards so RemoveConstraint(idx) never shifts unvisited entries.
    for (int idx = t.ConstraintList.Length - 1; idx >= 0; idx--)
    {
        Constraint c = t.ConstraintList[idx];
        if (dropConstraints.Contains(c.GetName()))
        {
            t.RemoveConstraint(idx);
            continue;
        }
        int constraintType = c.GetConstraintType();
        if (constraintType == 0)
        {
            this._database.schemaManager.GetUserTable(this._session, c.Core.MainTable.GetName()).GetConstraint(c.GetMainName().Name).Core = c.Core;
        }
        else if (constraintType == 1)
        {
            this._database.schemaManager.GetUserTable(this._session, c.Core.RefTable.GetName()).GetConstraint(c.GetRefName().Name).Core = c.Core;
        }
    }
}
// Gathers every name referenced by this routine's parameters and body.
// A self-reference marks the routine recursive and is excluded from the set.
private void SetReferences()
{
    var refs = new OrderedHashSet<QNameManager.QName>();
    for (int p = 0; p < this.ParameterTypes.Length; p++)
    {
        refs.AddAll(this.ParameterList.Get(p).GetReferences());
    }
    if (this.statement != null)
    {
        refs.AddAll(this.statement.GetReferences());
    }
    if (refs.Contains(this.GetSpecificName()))
    {
        refs.Remove(this.GetSpecificName());
        this.IsRecursive = true;
    }
    this._references = refs;
}
// Depth-first probe of the wait-for graph starting at "session".
// Returns false as soon as a session already in newWaits is reached or a
// recursive probe fails (indicating a potential deadlock); true otherwise.
public bool CheckDeadlock(Session session, OrderedHashSet<Session> newWaits)
{
    int waiterCount = session.WaitingSessions.Size();
    for (int idx = 0; idx < waiterCount; idx++)
    {
        Session waiter = session.WaitingSessions.Get(idx);
        // Short-circuit order matters: membership test first, then recursion.
        if (newWaits.Contains(waiter) || !this.CheckDeadlock(waiter, newWaits))
        {
            return false;
        }
    }
    return true;
}
// Looks up (or lazily creates) the file-info record for the given path and
// queues the path for checking. Returns null when the file is missing from
// the disk cache or its cached owner has been invalidated.
public TsFileAdditionalInfo? CheckAdd(string fullNameWithExtension, FileCompilationType compilationType)
{
    TsFileAdditionalInfo info;
    if (Result.Path2FileInfo.TryGetValue(fullNameWithExtension, out info))
    {
        // Known file that disappeared from disk: forget it.
        if (info.Owner.IsInvalid)
        {
            Result.Path2FileInfo.Remove(fullNameWithExtension);
            return null;
        }
    }
    else
    {
        var fc = Owner.DiskCache.TryGetItem(fullNameWithExtension) as IFileCache;
        if (fc == null || fc.IsInvalid)
        {
            return null;
        }
        info = TsFileAdditionalInfo.Create(fc, Owner.DiskCache);
        info.Type = compilationType;
        MainResult.MergeCommonSourceDirectory(fc.FullPath);
        Result.Path2FileInfo.Add(fullNameWithExtension, info);
    }
    if (!ToCheck.Contains(fullNameWithExtension))
    {
        ToCheck.Add(fullNameWithExtension);
    }
    // First concrete compilation type wins over Unknown.
    if (info.Type == FileCompilationType.Unknown)
    {
        info.Type = compilationType;
    }
    if (info.Type == FileCompilationType.JavaScriptAsset)
    {
        // A newly registered JS asset forces the dependency check back on.
        if (Result.JavaScriptAssets.AddUnique(info) && _noDependencyCheck)
        {
            _noDependencyCheck = false;
        }
    }
    return info;
}
// Recomputes the projected collection for each modified source fact and
// reports the delta: Removed for items no longer referenced by any source,
// Modified for items still present, Added for newly referenced items.
public IEnumerable<AggregationResult> Modify(ITuple tuple, IEnumerable<IFact> facts)
{
    var results = new List<AggregationResult>();
    foreach (var fact in facts)
    {
        var previous = _sourceToList[fact];
        var current = new OrderedHashSet<TResult>();
        _sourceToList[fact] = current;

        foreach (var projected in (IEnumerable<TResult>)_selector.Invoke(tuple, fact))
        {
            current.Add(projected);
        }

        // Items that vanished from this fact and hold no other references.
        foreach (var item in previous)
        {
            if (!current.Contains(item) && RemoveRef(item) == 0)
            {
                results.Add(AggregationResult.Removed(item));
            }
        }

        // Surviving items are Modified; first-time references are Added.
        foreach (var item in current)
        {
            if (previous.Contains(item))
            {
                results.Add(AggregationResult.Modified(item));
            }
            else if (AddRef(item) == 1)
            {
                results.Add(AggregationResult.Added(item));
            }
        }
    }
    return results;
}
public void ListHashSetTest()
{
    // Verifies insertion-order semantics: enumeration yields elements in the
    // order first added, duplicates are ignored, the indexer follows order.
    var set = new OrderedHashSet<string>();

    // Fresh set is empty and contains nothing.
    Assert.AreEqual(set.Count(), 0);
    CollectionAssert.AreEqual(new string[] { }, set.ToArray());
    Assert.IsFalse(set.Contains("a"));
    Assert.IsFalse(set.Contains("b"));

    // First element.
    set.Add("b");
    Assert.AreEqual(set.Count(), 1);
    Assert.IsFalse(set.Contains("a"));
    Assert.IsTrue(set.Contains("b"));
    CollectionAssert.AreEqual(new string[] { "b" }, set.ToArray());

    // Second element keeps insertion order ("b" before "a").
    set.Add("a");
    Assert.AreEqual(set.Count(), 2);
    Assert.IsTrue(set.Contains("a"));
    Assert.IsTrue(set.Contains("b"));
    CollectionAssert.AreEqual(new string[] { "b", "a" }, set.ToArray());

    // Re-adding an existing element is a no-op.
    set.Add("b");
    Assert.AreEqual(set.Count(), 2);
    Assert.IsTrue(set.Contains("a"));
    Assert.IsTrue(set.Contains("b"));
    CollectionAssert.AreEqual(new string[] { "b", "a" }, set.ToArray());

    // Indexer reflects insertion order.
    Assert.AreEqual(set[0], "b");
    Assert.AreEqual(set[1], "a");

    // Removal preserves the relative order of survivors.
    set.Remove("b");
    CollectionAssert.AreEqual(new string[] { "a" }, set.ToArray());
    Assert.IsTrue(set.Contains("a"));
    Assert.IsFalse(set.Contains("b"));

    // Clear empties everything.
    set.Clear();
    CollectionAssert.AreEqual(new string[] { }, set.ToArray());
    Assert.IsFalse(set.Contains("a"));
    Assert.IsFalse(set.Contains("b"));
}
// Emits CREATE SQL for the schema objects produced by "it" in dependency
// order: an object whose references are all present in "resolved" is emitted
// and registered as resolved; otherwise it is parked in "unresolved" for a
// later pass. (Decompiled control flow - the goto labels are kept as-is.)
public static void AddAllSql(OrderedHashSet <object> resolved, OrderedHashSet <object> unresolved, List <string> list, Iterator <object> it, OrderedHashSet <object> newResolved)
{
    while (it.HasNext())
    {
        ISchemaObject key = (ISchemaObject)it.Next();
        OrderedHashSet <QNameManager.QName> references = key.GetReferences();
        // flag stays true while every reference of "key" is already resolved.
        bool flag = true;
        for (int i = 0; i < references.Size(); i++)
        {
            QNameManager.QName name = references.Get(i);
            // System-schema and module-local names never block emission.
            if (SqlInvariants.IsSystemSchemaName(name) || (name.schema == SqlInvariants.ModuleQname))
            {
                continue;
            }
            int type = name.type;
            // NOTE(review): the numeric cases are schema-object type codes
            // from the decompiler; mapping them to named constants was not
            // attempted here - confirm against the original enum.
            switch (type)
            {
                case 3:
                {
                    if (!resolved.Contains(name))
                    {
                        flag = false;
                    }
                    continue;
                }
                case 4:
                case 6:
                case 7:
                case 8:
                case 10:
                case 11:
                case 15:
                {
                    // These reference kinds never gate emission.
                    continue;
                }
                case 5:
                {
                    // Constraint owned by this table: a type-3 constraint must
                    // also have all of its own references resolved.
                    if (name.Parent == key.GetName())
                    {
                        Constraint constraint = ((Table)key).GetConstraint(name.Name);
                        if ((constraint.GetConstraintType() == 3) && !IsChildObjectResolved(constraint, resolved))
                        {
                            flag = false;
                        }
                    }
                    continue;
                }
                case 9:
                {
                    if (key.GetSchemaObjectType() != 3)
                    {
                        // Non-table owner: fall through to the parent check below.
                        break;
                    }
                    Table table1 = (Table)key;
                    if (!IsChildObjectResolved(table1.GetColumn(table1.FindColumn(name.Name)), resolved))
                    {
                        flag = false;
                    }
                    continue;
                }
                case 12:
                case 13:
                case 0x10:
                case 0x11:
                    goto Label_0147;
                case 14:
                {
                    if (name.schema != null)
                    {
                        goto Label_0147;
                    }
                    continue;
                }
                default:
                    goto Label_013B;
            }
            // Reached only via the "break" in case 9: the owning parent object
            // must itself already be resolved.
            if (!resolved.Contains(name.Parent))
            {
                flag = false;
            }
            continue;
            Label_013B:
            if ((type != 0x18) && (type != 0x1b))
            {
                continue;
            }
            Label_0147:
            // Shared tail: the referenced name must be resolved directly.
            if (!resolved.Contains(name))
            {
                flag = false;
            }
        }
        if (!flag)
        {
            // Missing dependencies - revisit this object in a later pass.
            unresolved.Add(key);
        }
        else
        {
            QNameManager.QName specificName;
            // Routines (0x10/0x11/0x1b) register under their specific name.
            if (((key.GetSchemaObjectType() == 0x10) || (key.GetSchemaObjectType() == 0x1b)) || (key.GetSchemaObjectType() == 0x11))
            {
                specificName = ((Routine)key).GetSpecificName();
            }
            else
            {
                specificName = key.GetName();
            }
            resolved.Add(specificName);
            if (newResolved != null)
            {
                newResolved.Add(key);
            }
            if (key.GetSchemaObjectType() == 3)
            {
                list.AddRange(((Table)key).GetSql(resolved, unresolved));
            }
            else
            {
                // Recursive routines need a forward declaration plus an ALTER.
                if (((key.GetSchemaObjectType() - 0x10) <= 1) && ((Routine)key).IsRecursive)
                {
                    list.Add(((Routine)key).GetSqlDeclaration());
                    list.Add(((Routine)key).GetSqlAlter());
                    continue;
                }
                list.Add(key.GetSql());
            }
        }
    }
}
// Cross-checks that the list and the set agree on membership of "item",
// then returns this for fluent chaining.
public Both Contains(int item)
{
    bool inList = List.Contains(item);
    bool inSet = Set.Contains(item);
    Assert.IsTrue(inList == inSet);
    return this;
}
/** Add label uniquely and disjointly; intersection with
 * another set or int/char forces breaking up the set(s).
 *
 * Example, if reachable list of labels is [a..z, {k,9}, 0..9],
 * the disjoint list will be [{a..j,l..z}, k, 9, 0..8].
 *
 * As we add NFA configurations to a DFA state, we might as well track
 * the set of all possible transition labels to make the DFA conversion
 * more efficient. W/o the reachable labels, we'd need to check the
 * whole vocabulary space (could be 0..\uFFFF)! The problem is that
 * labels can be sets, which may overlap with int labels or other sets.
 * As we need a deterministic set of transitions from any
 * state in the DFA, we must make the reachable labels set disjoint.
 * This operation amounts to finding the character classes for this
 * DFA state whereas with tools like flex, that need to generate a
 * homogeneous DFA, must compute char classes across all states.
 * We are going to generate DFAs with heterogeneous states so we
 * only care that the set of transitions out of a single state are
 * unique. :)
 *
 * The idea for adding a new set, t, is to look for overlap with the
 * elements of existing list s. Upon overlap, replace
 * existing set s[i] with two new disjoint sets, s[i]-t and s[i]&t.
 * (if s[i]-t is nil, don't add). The remainder is t-s[i], which is
 * what you want to add to the set minus what was already there. The
 * remainder must then be compared against the i+1..n elements in s
 * looking for another collision. Each collision results in a smaller
 * and smaller remainder. Stop when you run out of s elements or
 * remainder goes to nil. If remainder is non nil when you run out of
 * s elements, then add remainder to the end.
 *
 * Single element labels are treated as sets to make the code uniform.
 */
protected virtual void AddReachableLabel(Label label)
{
    // Lazily create the label list on first use.
    if (_reachableLabels == null)
    {
        _reachableLabels = new OrderedHashSet <Label>();
    }
    /*
     * [email protected]("addReachableLabel to state "+dfa.decisionNumber+"."+stateNumber+": "+label.getSet().toString(dfa.nfa.grammar));
     * [email protected]("start of add to state "+dfa.decisionNumber+"."+stateNumber+": " +
     * "reachableLabels="+reachableLabels.toString());
     */
    if (_reachableLabels.Contains(label))
    {
        // exact label present
        return;
    }
    IIntSet t = label.Set;
    IIntSet remainder = t; // remainder starts out as whole set to add
    // n is captured BEFORE the loop: labels appended during the walk are
    // already disjoint and must not be revisited.
    int n = _reachableLabels.Count; // only look at initial elements
    // walk the existing list looking for the collision
    for (int i = 0; i < n; i++)
    {
        Label rl = _reachableLabels[i];
        /*
         * [email protected]("comparing ["+i+"]: "+label.toString(dfa.nfa.grammar)+" & "+
         * rl.toString(dfa.nfa.grammar)+"="+
         * intersection.toString(dfa.nfa.grammar));
         */
        if (!Label.Intersect(label, rl))
        {
            continue;
        }
        //[email protected](label+" collides with "+rl);
        // For any (s_i, t) with s_i&t!=nil replace with (s_i-t, s_i&t)
        // (ignoring s_i-t if nil; don't put in list)
        // Replace existing s_i with intersection since we
        // know that will always be a non nil character class
        IIntSet s_i = rl.Set;
        IIntSet intersection = s_i.And(t);
        _reachableLabels[i] = new Label(intersection);
        // Compute s_i-t to see what is in current set and not in incoming
        IIntSet existingMinusNewElements = s_i.Subtract(t);
        //[email protected](s_i+"-"+t+"="+existingMinusNewElements);
        if (!existingMinusNewElements.IsNil)
        {
            // found a new character class, add to the end (doesn't affect
            // outer loop duration due to n computation a priori.
            Label newLabel = new Label(existingMinusNewElements);
            _reachableLabels.Add(newLabel);
        }
        /*
         * [email protected]("after collision, " +
         * "reachableLabels="+reachableLabels.toString());
         */
        // anything left to add to the reachableLabels?
        remainder = t.Subtract(s_i);
        if (remainder.IsNil)
        {
            break; // nothing left to add to set. done!
        }
        t = remainder;
    }
    if (!remainder.IsNil)
    {
        /*
         * [email protected]("before add remainder to state "+dfa.decisionNumber+"."+stateNumber+": " +
         * "reachableLabels="+reachableLabels.toString());
         * [email protected]("remainder state "+dfa.decisionNumber+"."+stateNumber+": "+remainder.toString(dfa.nfa.grammar));
         */
        Label newLabel = new Label(remainder);
        _reachableLabels.Add(newLabel);
    }
    /*
     * [email protected]("#END of add to state "+dfa.decisionNumber+"."+stateNumber+": " +
     * "reachableLabels="+reachableLabels.toString());
     */
}
public void ListHashSetTest()
{
    // End-to-end walk of OrderedHashSet behavior: emptiness, ordered
    // insertion, duplicate suppression, indexing, removal, and Clear.
    OrderedHashSet<string> ohs = new OrderedHashSet<string>();

    Assert.AreEqual(ohs.Count(), 0);
    CollectionAssert.AreEqual(new string[] { }, ohs.ToArray());
    Assert.IsFalse(ohs.Contains("a"));
    Assert.IsFalse(ohs.Contains("b"));

    ohs.Add("b");
    Assert.AreEqual(ohs.Count(), 1);
    Assert.IsFalse(ohs.Contains("a"));
    Assert.IsTrue(ohs.Contains("b"));
    CollectionAssert.AreEqual(new string[] { "b" }, ohs.ToArray());

    ohs.Add("a");
    Assert.AreEqual(ohs.Count(), 2);
    Assert.IsTrue(ohs.Contains("a"));
    Assert.IsTrue(ohs.Contains("b"));
    // Enumeration order is insertion order.
    CollectionAssert.AreEqual(new string[] { "b", "a" }, ohs.ToArray());

    // Duplicate add changes nothing.
    ohs.Add("b");
    Assert.AreEqual(ohs.Count(), 2);
    Assert.IsTrue(ohs.Contains("a"));
    Assert.IsTrue(ohs.Contains("b"));
    CollectionAssert.AreEqual(new string[] { "b", "a" }, ohs.ToArray());

    Assert.AreEqual(ohs[0], "b");
    Assert.AreEqual(ohs[1], "a");

    ohs.Remove("b");
    CollectionAssert.AreEqual(new string[] { "a" }, ohs.ToArray());
    Assert.IsTrue(ohs.Contains("a"));
    Assert.IsFalse(ohs.Contains("b"));

    ohs.Clear();
    CollectionAssert.AreEqual(new string[] { }, ohs.ToArray());
    Assert.IsFalse(ohs.Contains("a"));
    Assert.IsFalse(ohs.Contains("b"));
}
// Explicit interface implementation: membership test delegated to the
// underlying fact collection.
bool IAlphaMemory<TFact>.Contains(TFact fact) => _facts.Contains(fact);
// True when the given tuple is stored in this memory.
public bool Contains(Tuple tuple) => _tuples.Contains(tuple);
// True when the given fact is stored in this memory.
public bool Contains(Fact fact) => _facts.Contains(fact);
// Partitions the incoming edge label sets into pairwise-disjoint sets.
// For each incoming set t, overlapping entries s_i already in the result are
// split into (s_i&t) and (s_i-t); whatever part of t remains uncovered at the
// end is appended. The returned list therefore covers the same symbols with
// no two sets intersecting.
protected virtual IList<IIntSet> MakeEdgeSetsDisjoint( IList<IIntSet> edges )
{
    OrderedHashSet<IIntSet> disjointSets = new OrderedHashSet<IIntSet>();
    // walk each incoming edge label/set and add to disjoint set
    int numEdges = edges.Count;
    for ( int e = 0; e < numEdges; e++ )
    {
        IntervalSet t = (IntervalSet)edges[e];
        if ( disjointSets.Contains( t ) )
        {
            // exact set present
            continue;
        }
        // compare t with set i for disjointness
        IntervalSet remainder = t; // remainder starts out as whole set to add
        // Count is captured BEFORE the inner loop: sets appended during the
        // walk are already disjoint and must not be revisited.
        int numDisjointElements = disjointSets.Count;
        for ( int i = 0; i < numDisjointElements; i++ )
        {
            IntervalSet s_i = (IntervalSet)disjointSets[i];
            if ( t.And( s_i ).IsNil )
            {
                // nothing in common
                continue;
            }
            //[email protected](label+" collides with "+rl);
            // For any (s_i, t) with s_i&t!=nil replace with (s_i-t, s_i&t)
            // (ignoring s_i-t if nil; don't put in list)
            // Replace existing s_i with intersection since we
            // know that will always be a non nil character class
            IntervalSet intersection = (IntervalSet)s_i.And( t );
            disjointSets[i] = intersection;
            // Compute s_i-t to see what is in current set and not in incoming
            IIntSet existingMinusNewElements = s_i.Subtract( t );
            //[email protected](s_i+"-"+t+"="+existingMinusNewElements);
            if ( existingMinusNewElements != null && !existingMinusNewElements.IsNil )
            {
                // found a new character class, add to the end (doesn't affect
                // outer loop duration due to n computation a priori.
                disjointSets.Add( existingMinusNewElements );
            }
            // anything left to add to the reachableLabels?
            remainder = (IntervalSet)t.Subtract( s_i );
            if ( remainder.IsNil )
            {
                break; // nothing left to add to set. done!
            }
            t = remainder;
        }
        if ( !remainder.IsNil )
        {
            disjointSets.Add( remainder );
        }
    }
    return disjointSets.GetElements();
}
// Resolves a relative ("local") import to a concrete file, honoring special
// .json/.css handling, registering dependency edges on parentInfo, and
// memoizing the answer in LocalResolveCache. Returns the resolved full path
// (preferring a build-cache .d.ts owner when the file needs no compilation),
// or null for CSS imports and unresolvable names.
public string ResolveLocalImport(string name, TSFileAdditionalInfo parentInfo, TSProject moduleInfo, string importedAsModule)
{
    var dirPath = PathUtils.Parent(name);
    var fileOnly = name.Substring(dirPath.Length + 1);
    var dc = _owner.DiskCache.TryGetItemPreferReal(dirPath) as IDirectoryCache;
    if (dc == null || dc.IsInvalid)
    {
        // Containing directory is missing or stale - import cannot resolve.
        return (null);
    }
    var isJson = false;
    var isCss = false;
    IFileCache item = null;
    // .json and .css imports are matched by their literal file name first.
    if (fileOnly.EndsWith(".json"))
    {
        item = dc.TryGetChild(fileOnly, true) as IFileCache;
        if (item != null)
        {
            isJson = true;
        }
    }
    if (fileOnly.EndsWith(".css"))
    {
        item = dc.TryGetChild(fileOnly, true) as IFileCache;
        if (item != null)
        {
            isCss = true;
        }
    }
    if (item == null)
    {
        // Otherwise probe the extension list (a different list applies when
        // the importer is ESM JavaScript); first valid hit wins.
        item = (parentInfo.Type == FileCompilationType.EsmJavaScript ? ExtensionsToImportFromJs : ExtensionsToImport).Select(ext => dc.TryGetChild(fileOnly + ext, true) as IFileCache)
            .FirstOrDefault(i => i != null && !i.IsInvalid);
    }
    if (item == null)
    {
        parentInfo.ReportDiag(false, -15, "Cannot resolve import '" + name + "'", 0, 0, 0, 0);
        return (null);
    }
    // Directory lookup may be case-insensitive; warn when on-disk casing
    // differs from the import as written.
    if (item.FullPath.Substring(0, name.Length) != name)
    {
        parentInfo.ReportDiag(false, -1, "Local import has wrong casing '" + name + "' on disk '" + item.FullPath + "'", 0, 0, 0, 0);
    }
    var itemInfo = TSFileAdditionalInfo.Get(item, _owner.DiskCache);
    parentInfo.ImportingLocal(itemInfo);
    itemInfo.MyProject = moduleInfo ?? parentInfo.MyProject;
    if (importedAsModule != null)
    {
        itemInfo.ImportedAsModule = importedAsModule;
    }
    if (isCss)
    {
        // CSS imports are registered as sources but resolve to null for the
        // TypeScript side; unbundled CSS also gets an output URL allocated.
        itemInfo.Type = FileCompilationType.ImportedCss;
        AddSource(itemInfo);
        CheckAdd(item.FullPath);
        var po = itemInfo.MyProject.ProjectOptions;
        if (!po.BundleCss)
        {
            if (itemInfo.OutputUrl == null)
            {
                itemInfo.OutputUrl = po.AllocateName(PathUtils.Subtract(item.FullPath, po.Owner.Owner.FullPath));
            }
        }
        return (null);
    }
    if (IsDts(item.Name))
    {
        // A .d.ts was imported directly; pull in a same-named sibling .js
        // implementation (if any) so it becomes part of the build.
        if (dc.TryGetChild(fileOnly + ".js", true) is IFileCache jsItem)
        {
            var jsItemInfo = TSFileAdditionalInfo.Get(jsItem, _owner.DiskCache);
            jsItemInfo.Type = FileCompilationType.JavaScript;
            jsItemInfo.MyProject = itemInfo.MyProject;
            parentInfo.ImportingLocal(jsItemInfo);
            CheckAdd(jsItem.FullPath);
        }
        // implementation for .d.ts file does not have same name, it needs to be added to build by b.asset("lib.js") and cannot have dependencies
    }
    else
    {
        itemInfo.Type = isJson ? FileCompilationType.Json : parentInfo.Type;
        AddSource(itemInfo);
    }
    // NOTE(review): the memoization check runs AFTER the side effects above
    // (ImportingLocal/AddSource), so those still fire on cache hits - confirm
    // this ordering is intentional.
    if (LocalResolveCache.TryGetValue(name, out var res))
    {
        return (res);
    }
    CheckAdd(item.FullPath);
    TryToResolveFromBuildCache(itemInfo);
    // Point callers at the cached .d.ts unless this file must be compiled.
    if (itemInfo.DtsLink != null && !ToCompile.Contains(item.FullPath) && !itemInfo.NeedsCompilation())
    {
        res = itemInfo.DtsLink.Owner.FullPath;
    }
    else
    {
        res = item.FullPath;
    }
    LocalResolveCache.Add(name, res);
    return (res);
}