public void Enumerate()
{
    // Arrange: a set seeded with three distinct values.
    Set<int> actual = new Set<int>();
    actual.AddRange(1, 2, 3);

    // Expected enumeration: the same three values, in insertion order.
    List<int> expected = new List<int>();
    expected.AddRange(new int[] { 1, 2, 3 });

    // Act + Assert: enumerating the set yields exactly the expected items.
    CollectionAssert.AreEqual(expected, new List<int>(actual));
}
/// <summary>
/// Accumulates the preferred DTE pairs over all sections. Pairs found for earlier
/// sections are fed into the search for later sections via <c>ourCurrentPairs</c>.
/// Returns null when any section cannot be encoded, matching the sibling override
/// that already propagates a null result from GetPreferredDTEPairsForSection.
/// </summary>
public override Set<KeyValuePair<string, byte>> GetPreferredDTEPairs(Set<string> replacements, Set<KeyValuePair<string, byte>> currentPairs, Stack<byte> dteBytes)
{
    Set<KeyValuePair<string, byte>> result = new Set<KeyValuePair<string, byte>>();
    // Work on a copy so the caller's currentPairs set is not mutated.
    Set<KeyValuePair<string, byte>> ourCurrentPairs = new Set<KeyValuePair<string, byte>>(currentPairs);
    for (int i = 0; i < Sections.Count; i++)
    {
        Set<KeyValuePair<string, byte>> dtePairs = GetPreferredDTEPairsForSection(GetCopyOfSections(), i, replacements, ourCurrentPairs, dteBytes);
        if (dtePairs == null)
        {
            // Section could not be handled with the available DTE bytes; propagate failure
            // (previously this would have passed null to AddRange instead of failing cleanly).
            return null;
        }
        result.AddRange(dtePairs);
        ourCurrentPairs.AddRange(result);
    }
    return result;
}
public static void TestSet()
{
    // Demonstrates Set<T> basics: case-sensitive membership and duplicate suppression.
    Set<string> people = new Set<string>();
    people.Add("Jim");
    people.Add("Jones");
    Console.WriteLine("contains Jim? {0}", people.Contains("Jim"));
    Console.WriteLine("contains jim? {0}", people.Contains("jim"));
    people.Add("Jim"); // duplicate add: count should be unchanged
    Console.WriteLine(people.Count);
    // AddRange with one duplicate ("Jones") and two new names.
    people.AddRange(new string[] { "Jones", "Jonny", "Michael" });
    Console.WriteLine(people.Count);
}
public void MoreWritingSystemLists()
{
    ILgWritingSystemFactory wsFactory = Cache.ServiceLocator.GetInstance<ILgWritingSystemFactory>();
    char[] separators = new char[] { ' ' };

    // --- Vernacular writing systems ---
    // The default vernacular WS must appear in both the current and the full lists.
    string sVern = wsFactory.GetStrFromWs(Cache.DefaultVernWs);
    Assert.IsTrue(Cache.LangProject.CurVernWss.Contains(sVern));
    Assert.IsTrue(Cache.LangProject.VernWss.Contains(sVern));
    Set<string> setVern = new Set<string>();
    setVern.AddRange(Cache.LangProject.VernWss.Split(separators));
    Assert.Less(0, setVern.Count, "should be at least one Vernacular WS");
    Set<string> setCurVern = new Set<string>();
    setCurVern.AddRange(Cache.LangProject.CurVernWss.Split(separators));
    Assert.Less(0, setCurVern.Count, "should be at least one Current Vernacular WS");
    Assert.LessOrEqual(setCurVern.Count, setVern.Count, "at least as many Current Vernacular as Vernacular");
    // Current vernacular must be a subset of vernacular, and the factory must know every tag.
    foreach (string tag in setCurVern)
    {
        Assert.IsTrue(setVern.Contains(tag), "Vernacular contains everything in Current Vernacular");
        int ws = wsFactory.GetWsFromStr(tag);
        Assert.AreNotEqual(0, ws, "factory should contain everything in Current Vernacular");
    }
    foreach (string tag in setVern)
    {
        int ws = wsFactory.GetWsFromStr(tag);
        Assert.AreNotEqual(0, ws, "factory should contain everything in Vernacular");
    }

    // --- Analysis writing systems: the same invariants ---
    string sAnal = wsFactory.GetStrFromWs(Cache.DefaultAnalWs);
    Assert.IsTrue(Cache.LangProject.CurAnalysisWss.Contains(sAnal));
    Assert.IsTrue(Cache.LangProject.AnalysisWss.Contains(sAnal));
    Set<string> setAnal = new Set<string>();
    setAnal.AddRange(Cache.LangProject.AnalysisWss.Split(separators));
    Assert.Less(0, setAnal.Count, "should be at least one Analysis WS");
    Set<string> setCurAnal = new Set<string>();
    setCurAnal.AddRange(Cache.LangProject.CurAnalysisWss.Split(separators));
    Assert.Less(0, setCurAnal.Count, "should be at least one Current Analysis WS");
    Assert.LessOrEqual(setCurAnal.Count, setAnal.Count, "at least as many Current Analysis as Analysis");
    foreach (string tag in setCurAnal)
    {
        Assert.IsTrue(setAnal.Contains(tag), "Analysis contains everything in Current Analysis");
        int ws = wsFactory.GetWsFromStr(tag);
        Assert.AreNotEqual(0, ws, "factory should contain everything in Current Analysis");
    }
    foreach (string tag in setAnal)
    {
        int ws = wsFactory.GetWsFromStr(tag);
        Assert.AreNotEqual(0, ws, "factory should contain everything in Analysis");
    }
}
public string ToString(string expName, PerfMetric metric, LblT lbl, string fmt) // set expName to null to get stats for all experiments
{
    Utils.ThrowException(fmt == null ? new ArgumentNullException("fmt") : null);
    // Columns: experiment names, sorted. Rows: algorithm names, unioned over all experiments, sorted.
    ArrayList<string> expList = new ArrayList<string>(mData.Keys);
    expList.Sort();
    Set<string> algoNames = new Set<string>();
    foreach (ConcurrentDictionary<string, FoldData> item in mData.Values)
    {
        algoNames.AddRange(item.Keys);
    }
    ArrayList<string> algoList = new ArrayList<string>(algoNames);
    algoList.Sort();
    StringBuilder sb = new StringBuilder();
    // Header row (only when reporting all experiments).
    if (expName == null)
    {
        foreach (string exp in expList)
        {
            sb.Append("\t" + exp);
        }
        sb.AppendLine();
    }
    // One row per algorithm: the average of the requested metric per experiment column.
    foreach (string algoName in algoList)
    {
        sb.Append(algoName);
        if (expName == null)
        {
            foreach (string exp in expList)
            {
                double stdev;
                double val = GetAvg(exp, algoName, metric, out stdev); // throws InvalidOperationException
                sb.Append("\t" + val.ToString(fmt));
            }
        }
        else
        {
            double stdev;
            sb.Append("\t" + GetAvg(expName, algoName, metric, out stdev).ToString(fmt)); // throws InvalidOperationException
        }
        sb.AppendLine();
    }
    return sb.ToString().TrimEnd();
}
public void Remove_Item_Test()
{
    // Arrange: a set with three items.
    int expectedCount = 2;
    Set<int> set = new Set<int>();
    List<int> items = new List<int>() { 1, 2, 3 };
    set.AddRange(items);

    // Act: remove one item.
    set.Remove(3);

    // Assert: expected value goes first -- the arguments were reversed,
    // which produces misleading "expected X but was Y" failure messages.
    Assert.AreEqual(expectedCount, set.Count);
}
/// <summary>
/// Takes the list of AffectedCcas fed into the dialog and the list of user selected wfics
/// and updates the AffectedCcas list in the parameter object
/// </summary>
internal void SetAffectedCcas(int[] selectedWfics)
{
    // Put the selected wfics into a set for fast intersection tests.
    Set<int> selectedSet = new Set<int>();
    selectedSet.AddRange(selectedWfics);
    // Keep only the CCAs whose AppliesTo vector shares at least one wfic with the selection.
    List<int> affected = new List<int>();
    foreach (int hvoCca in SentElem.AffectedCcas)
    {
        if (selectedSet.Intersection(Cache.GetVectorProperty(hvoCca, kflidAppliesTo, false)).Count > 0)
        {
            affected.Add(hvoCca);
        }
    }
    SentElem.AffectedCcas = affected;
}
public string ToString(int foldNum, string expName, PerfMetric metric, LblT lbl, string fmt) // set expName to null to get stats for all experiments
{
    Utils.ThrowException(foldNum < 1 ? new ArgumentOutOfRangeException("foldNum") : null);
    Utils.ThrowException(fmt == null ? new ArgumentNullException("fmt") : null);
    // Columns: experiment names, sorted. Rows: algorithm names, unioned over all experiments, sorted.
    ArrayList<string> expList = new ArrayList<string>(mData.Keys);
    expList.Sort();
    Set<string> algoNames = new Set<string>();
    foreach (Dictionary<string, FoldData> item in mData.Values)
    {
        algoNames.AddRange(item.Keys);
    }
    ArrayList<string> algoList = new ArrayList<string>(algoNames);
    algoList.Sort();
    StringBuilder sb = new StringBuilder();
    // Header row (only when reporting all experiments).
    if (expName == null)
    {
        foreach (string exp in expList)
        {
            sb.Append("\t" + exp);
        }
        sb.AppendLine();
    }
    // One row per algorithm: the metric value for the requested fold per experiment column.
    foreach (string algoName in algoList)
    {
        sb.Append(algoName);
        if (expName == null)
        {
            foreach (string exp in expList)
            {
                sb.Append("\t" + GetVal(foldNum, exp, algoName, metric, lbl).ToString(fmt));
            }
        }
        else
        {
            double val = GetVal(foldNum, expName, algoName, metric, lbl);
            sb.Append("\t" + val.ToString(fmt));
        }
        sb.AppendLine();
    }
    return sb.ToString().TrimEnd();
}
/// <summary>
/// Stamps bookkeeping fields on the given entities and adds them to the underlying set.
/// </summary>
/// <param name="entities">The entities to add. NOTE(review): the sequence is enumerated
/// twice (once for stamping, once by AddRange) -- callers should pass a materialized
/// collection, not a lazy query; verify against call sites.</param>
/// <returns>The result of the underlying set's AddRange.</returns>
public IEnumerable<TEntity> Add(IEnumerable<TEntity> entities)
{
    // Capture one timestamp for the whole batch so CreatedAt and UpdatedAt agree
    // (the original called DateTime.UtcNow twice, yielding slightly different values).
    DateTime utcNow = DateTime.UtcNow;
    foreach (var entity in entities)
    {
        // Cast once per interface instead of repeating 'as' for every property assignment.
        var observable = entity as IObservableEntity;
        if (observable != null)
        {
            observable.CreatedAt = utcNow;
            observable.UpdatedAt = utcNow;
        }
        var removable = entity as IRemovableEntity;
        if (removable != null)
        {
            removable.IsRemoved = false;
        }
    }
    return Set.AddRange(entities);
}
/// <summary>
/// Gets verbs in this class
/// </summary>
/// <param name="recursive">Whether or not to recursively get verbs from all children</param>
/// <returns>Set of verbs</returns>
public Set<string> GetVerbs(bool recursive)
{
    // Start from a copy so this class's own verb set is never mutated.
    Set<string> result = _verbs.Copy();
    if (recursive)
    {
        // The same verb may occur in several child classes; tolerate duplicates.
        result.ThrowExceptionOnDuplicateAdd = false;
        foreach (VerbClass child in GetChildren(true))
        {
            result.AddRange(child._verbs);
        }
    }
    return result;
}
public void testSet2()
{
    // A list with duplicates: three "string1" and two "string2".
    List<string> lst = new List<string>();
    lst.Add("string1");
    lst.Add("string2");
    lst.Add("string1");
    lst.Add("string2");
    lst.Add("string1");
    // Expected value goes first -- the arguments were reversed, which produces
    // misleading "expected X but was Y" failure messages.
    Assert.AreEqual(5, lst.Count);
    // Constructing a set from the list de-duplicates it.
    Set<string> set = new Set<string>(lst);
    Assert.AreEqual(2, set.Count);
    // Re-adding the same items must not grow the set.
    set.AddRange(lst);
    Assert.AreEqual(2, set.Count);
}
public IEnumerable<ushort> PropagateTags()
{
    // Already computed: return the cached tag list.
    if (mTags != null)
    {
        return mTags;
    }
    // Union the tags propagated up from all children (leaf nodes yield an empty set).
    Set<ushort> tagUnion = new Set<ushort>();
    if (mChildren != null)
    {
        foreach (Node child in mChildren)
        {
            tagUnion.AddRange(child.PropagateTags());
        }
    }
    // Cache as a list for later calls; this call returns the freshly built set.
    mTags = new ArrayList<ushort>(tagUnion);
    return tagUnion;
}
// requires: this to correspond to a cell relation for an association set (m_cellQuery.Extent)
// effects: Adds any key constraints present in this relation in constraints
private void PopulateKeyConstraintsForRelationshipSet(BasicSchemaConstraints constraints)
{
    var relationshipSet = m_cellQuery.Extent as AssociationSet;
    // Gather the key members of every end. If no end by itself forms a key,
    // the union of all end key members is used as the key instead.
    // CHANGE_ADYA_FEATURE_KEYS: assume that an Entity has exactly one key. Otherwise we
    // have to take a cross-product of all keys
    var allEndKeyMembers = new Set<MemberPath>(MemberPath.EqualityComparer);
    var someEndFormsKey = false;
    foreach (var end in relationshipSet.AssociationSetEnds)
    {
        var endMember = end.CorrespondingAssociationEndMember;
        var prefix = new MemberPath(relationshipSet, endMember);
        var keys = ExtentKey.GetKeysForEntityType(prefix, end.EntitySet.ElementType);
        Debug.Assert(keys.Count > 0, "No keys for entity?");
        Debug.Assert(keys.Count == 1, "Currently, we only support primary keys");
        if (MetadataHelper.DoesEndFormKey(relationshipSet, endMember))
        {
            // This end is a key end: record its key constraint directly.
            AddKeyConstraints(keys, constraints);
            someEndFormsKey = true;
        }
        // Remember the members of the (only) key for the fallback below.
        allEndKeyMembers.AddRange(keys[0].KeyFields);
    }
    if (someEndFormsKey == false)
    {
        // No end is a key -- build one key from all the end members combined.
        var fallbackKey = new ExtentKey(allEndKeyMembers);
        var keys = new[] { fallbackKey };
        AddKeyConstraints(keys, constraints);
    }
}
// Answers a select2-style filter request: collects up to 20 row ids whose name field
// matches 'text', preferring prefix matches over word-start matches and rows with a
// guid over rows without, then writes the matching names as a JSON "results" array.
void FilterList(WebData webData, ITable table, string nameField, string guidField, string text)
{
    var ids = new Set<long>();
    if (text == null)
    {
        // No filter text: just take the first 20 rows.
        ids.IncludeRange(table.FindRows(Search.None, ResultOption.Limit(20)));
    }
    else
    {
        // Pass 1: prefix match on rows that have a guid (grouped by name and guid).
        ids.AddRange(table.FindRows(Search.FieldLike(nameField, MDBSearch.Text(text + "%")) & Search.FieldNotEquals(guidField, null), ResultOption.SortAscending(nameField) + ResultOption.Group(nameField) + ResultOption.Group(guidField) + ResultOption.Limit(20)));
        if (ids.Count < 20)
        {
            // Pass 2: word-start match ("% text%") on rows that have a guid.
            ids.IncludeRange(table.FindRows(Search.FieldLike(nameField, MDBSearch.Text("% " + text + "%")) & Search.FieldNotEquals(guidField, null), ResultOption.SortAscending(nameField) + ResultOption.Group(nameField) + ResultOption.Group(guidField) + ResultOption.Limit(20)));
        }
        if (ids.Count < 20)
        {
            // Pass 3: prefix match on rows without a guid, topping the list up to 20.
            ids.IncludeRange(table.FindRows(Search.FieldLike(nameField, MDBSearch.Text(text + "%")) & Search.FieldEquals(guidField, null), ResultOption.SortAscending(nameField) + ResultOption.Group(nameField) + ResultOption.Limit(20 - ids.Count)));
        }
        if (ids.Count < 20)
        {
            // Pass 4: word-start match on rows without a guid, topping the list up to 20.
            ids.IncludeRange(table.FindRows(Search.FieldLike(nameField, MDBSearch.Text("% " + text + "%")) & Search.FieldEquals(guidField, null), ResultOption.SortAscending(nameField) + ResultOption.Group(nameField) + ResultOption.Limit(20 - ids.Count)));
        }
    }
    var json = new JsonWriter();
    json.BeginArray("results");
    if (ids.Count > 0)
    {
        //get items
        // Each result object uses the name value for both "id" and "text".
        var values = table.GetValues<string>(nameField, false, ids.SubRange(0, Math.Min(20, ids.Count)));
        foreach (var value in values)
        {
            json.BeginObject();
            json.String("id", value);
            json.String("text", value);
            json.EndObject();
        }
    }
    json.EndArray();
    var message = WebMessage.Create(webData.Method, $"Filter {nameField} {text}");
    webData.Answer = WebAnswer.Json(webData.Request, message, json.ToString());
}
/// <summary>
/// Gets support verb nodes. Only valid for root nodes.
/// </summary>
/// <param name="includeNullElementNodes">Whether or not to include null-element support verb nodes</param>
/// <returns>Set of support verb nodes</returns>
public Set<NomBankNode> GetSupportVerbNodes(bool includeNullElementNodes)
{
    if (!IsRoot)
    {
        throw new Exception("Current node is not a root node");
    }
    Set<NomBankNode> result = new Set<NomBankNode>(false);
    // Scan every labeled node list and collect the ones labeled as support verbs.
    foreach (NomBankLabeledNodeCollection nodeCollection in _labeledNodeCollections)
    {
        if (nodeCollection.Label.IsSupportVerb)
        {
            // support verbs are never actually split nor are they coreferential...just include everything
            result.AddRange(nodeCollection.GetNodes(includeNullElementNodes, true, false, true, false).Cast<NomBankNode>());
        }
    }
    return result;
}
public void SpecificComparer1()
{
    // With ToString-based equality, 1 and "1" (and 2 and "2") compare equal,
    // so only three distinct members survive.
    Set<object> set1 = new Set<object>(new ToStringEqualityComparer<object>());
    set1.AddRange(new object[] { 1, 2, "a", "1", "2" });
    Assert.That(set1.Count, Is.EqualTo(3));
    // Membership succeeds through either representation of the same key.
    Assert.That(set1.Contains(1), Is.True);
    Assert.That(set1.Contains(2), Is.True);
    Assert.That(set1.Contains("a"), Is.True);
    Assert.That(set1.Contains("1"), Is.True);
    Assert.That(set1.Contains("2"), Is.True);
    // The array holds one representative per equivalence class.
    object[] array = set1.ToArray();
    Assert.That(array.Length, Is.EqualTo(3));
    Assert.That(array, Has.Member(1));
    Assert.That(array, Has.Member(2));
    Assert.That(array, Has.Member("a"));
}
/// <summary>
/// Gets examples from this class
/// </summary>
/// <param name="includeInherited">Whether or not to include inherited examples</param>
/// <returns>Examples</returns>
public Set<string> GetExamples(bool includeInherited)
{
    // Start from a copy so this class's own example set is never mutated.
    Set<string> result = _examples.Copy();
    if (includeInherited)
    {
        // Ancestors may repeat examples; tolerate duplicates.
        result.ThrowExceptionOnDuplicateAdd = false;
        // Walk up the parent chain, taking each ancestor's own (non-inherited) examples.
        VerbClass ancestor = this;
        while ((ancestor = ancestor.Parent) != null)
        {
            result.AddRange(ancestor.GetExamples(false));
        }
    }
    return result;
}
/// <summary>
/// We only add less equals
/// </summary>
public NonRelationalValueAbstraction<Variable, Expression> AssumeInformationFrom<Exp>(INumericalAbstractDomainQuery<Variable, Exp> oracle)
{
    var result = this;
    // Only refine a "normal" abstract value (neither top nor bottom).
    if (this.IsNormal())
    {
        #region Update <
        if (this.StrictUpperBounds.IsNormal())
        {
            // Start from the existing strict upper bounds and extend them with
            // every upper bound the oracle knows for each of those variables.
            var newConstraints = new Set<Variable>(this.StrictUpperBounds.Values);
            foreach (var x in this.StrictUpperBounds.Values)
            {
                // Add S such that x <= S
                var newBounds = oracle.UpperBoundsFor(x, true).ApplyToAll(oracle.ToVariable);
                Contract.Assert(newBounds != null);
                newConstraints.AddRange(newBounds);
            }
            result = result.Update(ADomains.StrictUpperBounds, new SetOfConstraints<Variable>(newConstraints, false));
        }
        #endregion
        #region Update <=
        if (this.WeakUpperBounds.IsNormal())
        {
            // Same closure as above, but for the non-strict (weak) upper bounds.
            var newConstraints = new Set<Variable>(this.WeakUpperBounds.Values);
            foreach (var x in this.WeakUpperBounds.Values)
            {
                // Add S such that x <= S
                var newBounds = oracle.UpperBoundsFor(x, false).ApplyToAll(oracle.ToVariable);
                Contract.Assert(newBounds != null);
                newConstraints.AddRange(newBounds);
            }
            result = result.Update(ADomains.WeakUpperBounds, new SetOfConstraints<Variable>(newConstraints, false));
        }
        #endregion
    }
    return result;
}
// TODO: Insert into symbols overload
// FIRST extends START from single symbols to sentential forms:
//   FIRST(X1X2...Xk) = START(X1) ∪ FIRST(X2...Xk), if X1 is nullable
//   FIRST(X1X2...Xk) = START(X1)                   otherwise
//   FIRST(ε) = Ø = { }
/// <summary>
/// The First function yields the set of starter symbols for a sequence of grammar symbols. It is formally
/// defined as
/// FIRST(α) = { a ∈ T | α ∗⇒ aβ }
/// for any sentential form α ∈ (T ∪ N)*. We have therefore extended the set-valued function to all sentential forms.
/// </summary>
/// <param name="analyzer"></param>
/// <param name="symbols">The sequence of symbols (possibly empty, aka epsilon)</param>
public static IReadOnlySet<Terminal<TTokenKind>> First<TTokenKind>(
    this IFirstSymbolsAnalyzer<TTokenKind> analyzer,
    IEnumerable<Symbol> symbols) where TTokenKind : struct, Enum
{
    // Note (dragon book): some texts also put ε into First(α) when α *=> ε;
    // here nullability is kept in the separate Erasable function instead.
    var starters = new Set<Terminal<TTokenKind>>();
    foreach (var symbol in symbols)
    {
        starters.AddRange(analyzer.First(symbol));
        // Once a non-erasable symbol is reached, later symbols cannot contribute.
        if (!analyzer.Erasable(symbol))
        {
            break;
        }
    }
    return starters;
}
/// <summary>
/// Given a segment (for which we should have just loaded the wordforms), load any associated text tagging data
/// </summary>
/// <param name="hvoSeg">Database id (hvo) of the segment whose tags should be cached.</param>
private void LoadDataForTextTags(int hvoSeg)
{
    // Get a 'real' Segment
    ISegment curSeg;
    try
    {
        curSeg = m_segRepository.GetObject(hvoSeg);
        if (curSeg.AnalysesRS == null || curSeg.AnalysesRS.Count == 0)
        {
            return; // small sanity check
        }
    }
    catch (KeyNotFoundException)
    {
        return; // Hmm... this could be a problem, but we'll just skip it for now.
    }
    // Get all AnalysisOccurrences in this Segment
    // Resharper says the following LINQ is equivalent. OK, I guess!
    //var segWords = new List<AnalysisOccurrence>();
    //for (int i = 0; i < curSeg.AnalysesRS.Count; i++)
    //    segWords.Add(new AnalysisOccurrence(curSeg, i));
    var segWords = curSeg.AnalysesRS.Select((t, i) => new AnalysisOccurrence(curSeg, i)).ToList();
    // Find all the tags for this Segment's AnalysisOccurrences and cache them
    var textTagList = InterlinTaggingChild.GetTaggingReferencingTheseWords(segWords);
    // Track which occurrences got a real tag string cached, so the rest can be
    // given empty strings below.
    var occurrencesTagged = new Set<AnalysisOccurrence>();
    foreach (var tag in textTagList)
    {
        occurrencesTagged.AddRange(tag.GetOccurrences());
        CacheTagString(tag);
    }
    // now go through the list of occurrences that didn't have tags cached, and make sure they have empty strings cached
    var occurrencesWithoutTags = occurrencesTagged.SymmetricDifference(segWords);
    if (occurrencesWithoutTags != null)
    {
        CacheNullTagString(occurrencesWithoutTags);
    }
}
/// <summary>
/// Gets thematic roles in this class
/// </summary>
/// <param name="includeInherited">Whether or not to include inherited roles</param>
/// <returns>Thematic roles</returns>
public Set<VerbNetEngine.ThematicRole> GetThematicRoles(bool includeInherited)
{
    // Start from a copy so this class's own role set is never mutated.
    Set<VerbNetEngine.ThematicRole> result = _thematicRoles.Copy();
    if (includeInherited)
    {
        // Ancestors may repeat roles; tolerate duplicates.
        result.ThrowExceptionOnDuplicateAdd = false;
        // Walk up the parent chain, taking each ancestor's own (non-inherited) roles.
        VerbClass ancestor = this;
        while ((ancestor = ancestor.Parent) != null)
        {
            result.AddRange(ancestor.GetThematicRoles(false));
        }
    }
    return result;
}
// Registers a message expression under the table keyed by the set of index
// variables the expression's declaration uses, creating the table on first use.
void RegisterVariable(IExpression messageExpr)
{
    // get the set of indexVars
    Set<IVariableDeclaration> indexVars = new Set<IVariableDeclaration>();
    object messageDecl = Recognizer.GetDeclaration(messageExpr);
    var varInfo = VariableInformation.GetVariableInformation(context, messageDecl);
    foreach (var bracket in indexVarsOf(varInfo)) { } // (see note below)
    foreach (var bracket in varInfo.indexVars)
    {
        indexVars.AddRange(bracket);
    }
    TableInfo table;
    if (!tableOfIndexVars.TryGetValue(indexVars, out table))
    {
        // First expression with this index-variable set: build its table.
        table = new TableInfo();
        // Table name: the index variable names joined by '_', or "scalar" when there are none.
        StringBuilder sb = new StringBuilder();
        foreach (IVariableDeclaration indexVar in indexVars)
        {
            if (sb.Length > 0)
            {
                sb.Append("_");
            }
            sb.Append(indexVar.Name);
        }
        if (sb.Length == 0)
        {
            sb.Append("scalar");
        }
        table.Name = VariableInformation.GenerateName(context, sb.ToString());
        // Record every index variable with its matching size, flattened across brackets.
        for (int bracket = 0; bracket < varInfo.indexVars.Count; bracket++)
        {
            for (int i = 0; i < varInfo.indexVars[bracket].Length; i++)
            {
                table.indexVars.Add(varInfo.indexVars[bracket][i]);
                table.sizes.Add(varInfo.sizes[bracket][i]);
            }
        }
        tableOfIndexVars.Add(indexVars, table);
    }
    table.messageExprs.Add(messageExpr);
}
public override Set<KeyValuePair<string, byte>> GetPreferredDTEPairs(Set<string> replacements, Set<KeyValuePair<string, byte>> currentPairs, Stack<byte> dteBytes)
{
    Set<KeyValuePair<string, byte>> result = new Set<KeyValuePair<string, byte>>();
    // Work on a copy so the caller's currentPairs set is not mutated.
    Set<KeyValuePair<string, byte>> workingPairs = new Set<KeyValuePair<string, byte>>(currentPairs);
    for (int section = 0; section < Sections.Count; section++)
    {
        Set<KeyValuePair<string, byte>> sectionPairs = GetPreferredDTEPairsForSection(GetCopyOfSections(), section, replacements, workingPairs, dteBytes);
        // A null section result means the section could not be handled: fail the whole request.
        if (sectionPairs == null)
        {
            return null;
        }
        result.AddRange(sectionPairs);
        // Later sections see every pair accumulated so far.
        workingPairs.AddRange(result);
    }
    return result;
}
/// <summary>
/// This method assumes that a SemDomSearchCache has cached the Semantic Domains by
/// search key (a Tuple of word string and writing system integer). It then takes the gloss,
/// a short definition (if only one or two words), and reversal from a LexSense and uses those
/// words as search keys to find Semantic Domains that have one of those words in
/// their Name or Example Words fields.
/// </summary>
/// <param name="semDomCache"></param>
/// <param name="sense"></param>
/// <returns>The set of matching semantic domains (de-duplicated).</returns>
public IEnumerable<ICmSemanticDomain> FindCachedDomainsThatMatchWordsInSense(
    SemDomSearchCache semDomCache, ILexSense sense)
{
    var strategy = new SenseSearchStrategy(Cache, sense);
    var matches = new Set<ICmSemanticDomain>();
    // Each key maps a writing system to the words extracted from the sense.
    foreach (var keyValuePair in strategy.GetSearchKeysFromSense())
    {
        foreach (var word in keyValuePair.Value)
        {
            var domains = semDomCache.GetDomainsForCachedString(keyValuePair.Key, word);
            // A cache miss yields null; only union real hits.
            if (domains != null)
            {
                matches.AddRange(domains);
            }
        }
    }
    return matches;
}
private void PropagateTags(SuffixTrieNode node)
{
    int childCount = node.mChildren.Count;
    if (childCount == 1)
    {
        // Exactly one child: after recursing, this node shares the child's tag set.
        foreach (SuffixTrieNode child in node.mChildren.Values)
        {
            PropagateTags(child);
            node.mTags = child.mTags;
        }
    }
    else if (childCount > 1)
    {
        // Several children: recurse into each and union their tags.
        Set<string> merged = new Set<string>();
        foreach (SuffixTrieNode child in node.mChildren.Values)
        {
            PropagateTags(child);
            merged.AddRange(child.mTags);
        }
        node.mTags = merged;
    }
    // Leaf nodes (no children) keep whatever tags they already have.
}
public Set<LblT> GetLabels(string expName, string algoName)
{
    Utils.ThrowException(expName == null ? new ArgumentNullException("expName") : null);
    Utils.ThrowException(algoName == null ? new ArgumentNullException("algoName") : null);
    var labels = new Set<LblT>();
    // Unknown experiment or algorithm: return the empty label set.
    ConcurrentDictionary<string, FoldData> algoData;
    if (!mData.TryGetValue(expName, out algoData))
    {
        return labels;
    }
    FoldData foldData;
    if (!algoData.TryGetValue(algoName, out foldData))
    {
        return labels;
    }
    // Union the labels found in every non-null fold matrix.
    foreach (PerfMatrix<LblT> foldMtx in foldData.Where(m => m != null))
    {
        labels.AddRange(foldMtx.GetLabels());
    }
    return labels;
}
protected override IStatement DoConvertStatement(IStatement ist)
{
    // Top-level statements are the ones carrying DependencyInformation.
    bool isTopLevel = context.InputAttributes.Has<DependencyInformation>(ist);
    if (isTopLevel)
    {
        // Before converting, queue up the loop variables that must be reversed
        // within this statement (accumulated into the shared loopVarsToReverse set).
        Set<IVariableDeclaration> loopVars;
        if (loopVarsToReverseInStatement.TryGetValue(ist, out loopVars))
        {
            loopVarsToReverse.AddRange(loopVars);
        }
    }
    // The base conversion runs with loopVarsToReverse in effect.
    IStatement st = base.DoConvertStatement(ist);
    if (isTopLevel && loopVarsToReverse.Count > 0)
    {
        // Record the original->converted mapping and keep the loop-merging
        // index info consistent with the new statement.
        replacements.Add(ist, st);
        loopMergingInfo.AddEquivalentStatement(st, loopMergingInfo.GetIndexOf(ist));
        // Reset shared state for the next top-level statement.
        loopVarsToReverse.Clear();
    }
    return st;
}
/// <summary>
/// Reorder containers to follow their order in the input stack. Also removes duplicates.
/// </summary>
/// <param name="context"></param>
/// <param name="start">Stack index below which matched containers are dropped rather than re-added.</param>
public void OrderByContext(BasicTransformContext context, int start = 0)
{
    if (inputs.Count == 0)
    {
        return;
    }
    // Snapshot the current containers, then rebuild inputs/outputs in stack order.
    Set<IStatement> inputSet = new Set<IStatement>(new ContainerComparer());
    inputSet.AddRange(inputs);
    inputs.Clear();
    outputs.Clear();
    for (int i = 0; i < context.InputStack.Count; i++)
    {
        TransformInfo ti = context.InputStack[i];
        IStatement inputElement = ti.inputElement as IStatement;
        // Only container statements on the stack are relevant.
        if (inputElement == null || !IsContainer(inputElement))
        {
            continue;
        }
        inputElement = CreateContainer(inputElement);
        if (inputSet.Contains(inputElement))
        {
            if (i >= start)
            {
                inputs.Add(inputElement);
#if ignoreOutput
                outputs.Add(inputElement);
#else
                outputs.Add((IStatement)ti.PrimaryOutput);
#endif
            }
            // Removing here both de-duplicates repeats on the stack and leaves
            // only the unmatched containers for the final step below.
            inputSet.Remove(inputElement);
        }
    }
    // there may be containers that are not in the current context at all. put these on the inside.
    inputs.AddRange(inputSet);
    outputs.AddRange(inputSet);
}
/// <summary>
/// Returns the slice ids the given slice depends on, memoized in dependencesCache:
/// each method dependency is mapped to its owning slice, plus whatever
/// ComputeCallersDependencies contributes.
/// </summary>
public IEnumerable<ISliceId> Dependences(ISliceId sliceId)
{
    IEnumerable<ISliceId> res;
    // Fast path: answer already memoized.
    lock (this.LockDependencesCache)
    {
        if (this.dependencesCache.TryGetValue(sliceId, out res))
        {
            return res;
        }
    }
    SliceDefinition sliceDef;
    if (!this.slices.TryGetValue(sliceId, out sliceDef))
    {
        return EmptySliceIdArray; // should not happen
    }
    // Map each method this slice depends on to the slice that owns that method.
    var result = new Set<ISliceId>();
    ISliceId depSliceId;
    foreach (var m in sliceDef.Dependencies)
    {
        if (this.sliceOfMethod.TryGetValue(m, out depSliceId))
        {
            result.Add(depSliceId);
        }
    }
    lock (this.LockDependencesCache)
    {
        // NOTE(review): the result is computed outside the lock, so two threads may
        // reach here for the same sliceId; Add could then throw on a duplicate key
        // unless dependencesCache tolerates it -- verify.
        var callers = ComputeCallersDependencies(sliceId);
        result.AddRange(callers);
        this.dependencesCache.Add(sliceId, result);
    }
    return result;
}
// Checks that the foreign key is guaranteed in C-space: the child view's query
// must be contained in the parent view's query; otherwise an error is recorded.
private void GuaranteeForeignKeyConstraintInCSpace(
    QueryRewriter childRewriter,
    QueryRewriter parentRewriter,
    ErrorLog errorLog)
{
    ViewgenContext childContext = childRewriter.ViewgenContext;
    ViewgenContext parentContext = parentRewriter.ViewgenContext;
    CellTreeNode childView = childRewriter.BasicView;
    CellTreeNode parentView = parentRewriter.BasicView;
    // Containment of child in parent means the FK holds -- nothing to report.
    if (FragmentQueryProcessor.Merge(childContext.RightFragmentQP, parentContext.RightFragmentQP).IsContainedIn(childView.RightFragmentQuery, parentView.RightFragmentQuery))
    {
        return;
    }
    // NOTE(review): these two results are discarded (decompiled artifact?); the calls
    // are kept in case they have side effects -- verify against the original source.
    LeftCellWrapper.GetExtentListAsUserString((IEnumerable<LeftCellWrapper>)childView.GetLeaves());
    LeftCellWrapper.GetExtentListAsUserString((IEnumerable<LeftCellWrapper>)parentView.GetLeaves());
    string message = Strings.ViewGen_Foreign_Key_Not_Guaranteed_InCSpace((object)this.ToUserString());
    // Report every wrapper from both views in the error record.
    Set<LeftCellWrapper> allWrappers = new Set<LeftCellWrapper>((IEnumerable<LeftCellWrapper>)parentView.GetLeaves());
    allWrappers.AddRange((IEnumerable<LeftCellWrapper>)childView.GetLeaves());
    ErrorLog.Record record = new ErrorLog.Record(ViewGenErrorCode.ForeignKeyNotGuaranteedInCSpace, message, (IEnumerable<LeftCellWrapper>)allWrappers, string.Empty);
    errorLog.AddEntry(record);
}
private void InferParameters(Set<EquationBlock> visited)
{
    // Console.WriteLine("Visiting {0}", this.Block.Index);
    // Already processed: stop, which also breaks cycles in the equation graph.
    if (visited.Contains(this))
    {
        return;
    }
    visited.Add(this);
    // Union this block's formals with those of every successor, computed recursively first.
    var merged = new Set<string>(this.formalParameters);
    foreach (EquationBody succ in this.bodies)
    {
        succ.To.InferParameters(visited);
        merged.AddRange(succ.To.formalParameters);
    }
    // Store back as a sorted list for deterministic ordering.
    List<string> sorted = new List<string>(merged);
    sorted.Sort();
    this.formalParameters = sorted;
}
// Returns a list containing the set union of the two lists' elements.
// When either list is empty the OTHER list is returned as-is (no copy).
public static List<T> SetUnion<T>(this List<T> me, List<T> other)
{
    Contract.Requires(me != null);
    Contract.Requires(other != null);
    Contract.Ensures(Contract.Result<List<T>>() != null);
    // Short-circuits: union with an empty list is the other operand itself.
    if (me.Count == 0)
    {
        return other;
    }
    if (other.Count == 0)
    {
        return me;
    }
    // General case: de-duplicate through a set, then materialize.
    var union = new Set<T>(me);
    union.AddRange(other);
    return union.ToList();
}
// Normalizes a domain containing negated constants: each negation is rewritten over
// the full set of possible values, and the positive values it does not exclude are
// added back explicitly.
internal static Set<Constant> ExpandNegationsInDomain(
    IEnumerable<Constant> domain, IEnumerable<Constant> otherPossibleValues)
{
    // All constants the domain could take, including the extra values.
    Set<Constant> possibleValues = Domain.DeterminePossibleValues(domain, otherPossibleValues);
    Set<Constant> result = new Set<Constant>(Constant.EqualityComparer);
    foreach (Constant element in domain)
    {
        NegatedConstant negatedConstant = element as NegatedConstant;
        if (negatedConstant == null)
        {
            // Positive constant: keep as-is.
            result.Add(element);
        }
        else
        {
            // Re-express the negation over the full set of possible values...
            result.Add((Constant)new NegatedConstant((IEnumerable<Constant>)possibleValues));
            // ...and explicitly add the possible values the original negation allowed.
            Set<Constant> allowed = possibleValues.Difference(negatedConstant.Elements);
            result.AddRange((IEnumerable<Constant>)allowed);
        }
    }
    return result;
}
static void Main(string[] args)
{
    List<string> left = new List<string>() { "C", "D", "L" };
    List<string> right = new List<string>() { "C", "L", "C", "D" };
    // Cross join the two lists, keeping only pairs whose members differ.
    var mismatchedPairs = from l in left
                          from r in right
                          where l != r
                          select new Pair(l, r);
    // The set drops duplicate pairs produced by the repeated "C" in 'right'.
    Set pairSet = new Set();
    pairSet.AddRange(mismatchedPairs);
    foreach (var item in pairSet)
    {
        Console.WriteLine(item.First + " - " + item.Second);
    }
    Console.ReadLine();
}
public void WritingSystemsLists()
{
    // Collect every writing system the language project exposes.
    List<IWritingSystem> list = new List<IWritingSystem>();
    foreach (var x in Cache.LangProject.AllWritingSystems)
        list.Add(x);
    Assert.AreEqual(2, list.Count);
    ILgWritingSystemFactory factWs = Cache.ServiceLocator.GetInstance<ILgWritingSystemFactory>();
    Assert.LessOrEqual(list.Count, factWs.NumberOfWs, "factory list is at least as large as AllWritingSystems");
    // Marshal the factory's full WS-handle list out of native memory into a set.
    Set<int> set = new Set<int>();
    using (ArrayPtr rgwsT = MarshalEx.ArrayToNative<int>(factWs.NumberOfWs))
    {
        factWs.GetWritingSystems(rgwsT, factWs.NumberOfWs);
        set.AddRange(MarshalEx.NativeToArray<int>(rgwsT, factWs.NumberOfWs));
    }
    // The factory must know both English and French.
    int wsEn = factWs.GetWsFromStr("en");
    Assert.AreNotEqual(0, wsEn, "factory should contain English WS");
    int wsFr = factWs.GetWsFromStr("fr");
    Assert.AreNotEqual(0, wsFr, "factory should contain French WS");
    // Every project WS handle must be known to the factory; pick out en and fr.
    IWritingSystem eng = null;
    IWritingSystem frn = null;
    foreach (var x in list)
    {
        Assert.IsTrue(set.Contains(x.Handle), "AllWritingSystems should be a subset of the factory list");
        if (x.Handle == wsEn)
            eng = x;
        else if (x.Handle == wsFr)
            frn = x;
    }
    // Language names must agree between the project WS objects and the factory engines.
    Assert.IsNotNull(eng, "AllWritingSystems should contain English");
    Assert.AreEqual("English", factWs.get_EngineOrNull(wsEn).LanguageName);
    Assert.AreEqual("English", eng.LanguageName);
    Assert.IsNotNull(frn, "AllWritingSystems should contain French");
    Assert.AreEqual("French", frn.LanguageName);
    Assert.AreEqual("French", factWs.get_Engine("fr").LanguageName);
}
/// <summary>
/// kludge: filter to allow only complex entry references.
/// </summary>
/// <param name="hvoItem"></param>
/// <returns>True when the item should be disabled (i.e., it is not a known complex entry ref).</returns>
protected override bool DisableItem(int hvoItem)
{
    if (m_complexEntryRefs == null)
    {
        m_complexEntryRefs = new Set<int>();
        // NOTE(review): 'dict' is created empty and never populated, so the loop below
        // adds nothing and every item ends up disabled. This looks like incomplete
        // code (a population step seems to be missing) -- verify intent.
        Dictionary<int, List<int>> dict = new Dictionary<int,List<int>>();
        // go through each list and add the values to our set.
        foreach (List<int> refs in dict.Values)
            m_complexEntryRefs.AddRange(refs);
    }
    return !m_complexEntryRefs.Contains(hvoItem);
}
// Builds the set of item hvos to operate on: either just the rows the user
// checked, or every row in the browse view.
internal Set<int> ItemsToChangeSet(bool fOnlyIfSelected)
{
    CheckDisposed();
    Set<int> result = new Set<int>();
    if (fOnlyIfSelected)
        result.AddRange(m_bv.CheckedItems);
    else
        result.AddRange(m_bv.AllItems);
    return result;
}
/// <summary>
/// Make an array which contains all the members of first() plus those of second() that
/// are not included in first.
/// </summary>
/// <param name="first"></param>
/// <param name="second"></param>
/// <returns></returns>
public static int[] MergeTwoArrays(int[] first, int[] second)
{
    // Seed the set with 'first', then fold in 'second'; duplicates are dropped by the set.
    Set<int> merged = new Set<int>(first);
    merged.AddRange(second);
    return merged.ToArray();
}
// Contributes this view's explicit writing-system display list (if any) to the
// accumulating set of visible writing systems.
internal override void AddViewWritingSystems(Set<ILgWritingSystem> visibleWss)
{
    var wssToDisplay = m_view.WritingSystemsToDisplay;
    // A null display list contributes nothing.
    if (wssToDisplay != null)
    {
        visibleWss.AddRange(wssToDisplay);
    }
}
// effects: Given a set of values in domain, "normalizes" it, i.e.,
// all positive constants are seperated out and any negative constant
// is changed s.t. it is the negative of all positive values
// extraValues indicates more constants that domain could take, e.g.,
// domain could be "1, 2, NOT(1, 2)", extraValues could be "3". In
// this case, we return "1, 2, 3, NOT(1, 2, 3)"
internal static CellConstantSet ExpandNegationsInDomain(IEnumerable<Constant> domain, IEnumerable<Constant> otherPossibleValues)
{
    // Every constant referenced in (domain UNION extraValues), e.g., 1, NOT(2) => 1, 2.
    CellConstantSet possibleValues = DeterminePossibleValues(domain, otherPossibleValues);
    // Examples of the normalization performed below:
    //   v = 1, NOT(1, 2); p = 1, 2, 3 => d = 1, NOT(1, 2, 3), 3
    //   v = 1, 2, NOT(1); p = 1, 2, 4 => d = 1, 2, 4, NOT(1, 2, 4)
    // NotNull also works naturally: if possibleValues has (1, 2, NULL) and values
    // has NOT(NULL), then 1 and 2 are added to the result.
    CellConstantSet normalized = new Set<Constant>(Constant.EqualityComparer);
    foreach (Constant constant in domain)
    {
        NegatedConstant negated = constant as NegatedConstant;
        if (negated == null)
        {
            // Positive constant: keep as-is.
            normalized.Add(constant);
        }
        else
        {
            // Rewrite the negation to exclude the full set of possible values...
            normalized.Add(new NegatedConstant(possibleValues));
            // ...and add back, as positives, the possible values the original
            // negation allowed (e.g., NOT(1, 2, 3) with p = 1, 2, 3, 4 adds 4).
            normalized.AddRange(possibleValues.Difference(negated.Elements));
        }
    }
    return normalized;
}
/// <summary>
/// Gets the list of files that was installed by the given mod.
/// </summary>
/// <param name="p_modInstaller">The mod whose installed files are to be returned.</param>
/// <returns>The list of files that was installed by the given mod.</returns>
public IList<string> GetInstalledModFiles(IMod p_modInstaller)
{
	Set<string> setFiles = new Set<string>(StringComparer.OrdinalIgnoreCase);
	string strInstallerKey = GetModKey(p_modInstaller);
	// An unknown or already-removed mod has no installed files.
	if (String.IsNullOrEmpty(strInstallerKey) || m_setRemovedModKeys.Contains(strInstallerKey))
		return setFiles;
	// Files recorded for this installer here and in the enlisted install log...
	setFiles.AddRange(m_dicInstalledFiles.Where(itm => itm.Installers.Contains(strInstallerKey)).Select(itm => itm.Item));
	setFiles.AddRange(EnlistedInstallLog.m_dicInstalledFiles.Where(itm => itm.Installers.Contains(strInstallerKey)).Select(itm => itm.Item));
	// ...minus the files recorded as uninstalled for this installer.
	setFiles.RemoveRange(m_dicUninstalledFiles.Where(itm => itm.Installers.Contains(strInstallerKey)).Select(itm => itm.Item));
	return setFiles;
}
/// <summary>
/// Collects the set of wfics affected by the given text tags: the currently selected
/// wfics plus every base annotation any of the tags AppliesTo.
/// This sets up UpdateAffectedBundles() to do all the PropChangeds at once.
/// </summary>
/// <param name="ttagAnnList">Hvos of the text-tag (indirect) annotations to examine.</param>
/// <returns>The set of affected wfic hvos.</returns>
private Set<int> GetAllAffectedWficsFromTags(List<int> ttagAnnList)
{
	// First add the selected wfics to our collection.
	Set<int> results = new Set<int>();
	results.AddRange(SelectedWfics);
	// Then add any wfics affected (deleted) by this tag addition.
	foreach (int hvoTTag in ttagAnnList)
	{
		ICmIndirectAnnotation ann = CmObject.CreateFromDBObject(Cache, hvoTTag) as ICmIndirectAnnotation;
		if (ann == null)
			continue; // not an indirect annotation; previously the 'as' result was dereferenced unconditionally and could NRE
		foreach (ICmAnnotation ttagTarget in ann.AppliesToRS)
		{
			if (ttagTarget is ICmBaseAnnotation)
				results.Add(ttagTarget.Hvo);
		}
	}
	return results;
}
// requires: constraint.ChildColumns form a key in
// constraint.ChildTable (actually they should subsume the primary key)
// effects: Verifies that the foreign key constraint is guaranteed by the C-space views:
// every row of the child extent's view must also be contained in the parent extent's
// view. If not, records a ForeignKeyNotGuaranteedInCSpace error (listing the cell
// wrappers of both extents) in errorLog.
private void GuaranteeForeignKeyConstraintInCSpace(
	QueryRewriter childRewriter, QueryRewriter parentRewriter, ErrorLog errorLog)
{
	var childContext = childRewriter.ViewgenContext;
	var parentContext = parentRewriter.ViewgenContext;
	var cNode = childRewriter.BasicView;
	var pNode = parentRewriter.BasicView;

	// Merge both sides' fragment query processors so containment can be checked across them.
	var qp = FragmentQueryProcessor.Merge(childContext.RightFragmentQP, parentContext.RightFragmentQP);
	var cImpliesP = qp.IsContainedIn(cNode.RightFragmentQuery, pNode.RightFragmentQuery);

	if (false == cImpliesP)
	{
		// Foreign key constraint not being ensured in C-space.
		// (Removed two dead locals that formatted extent lists but were never used.)
		var message = Strings.ViewGen_Foreign_Key_Not_Guaranteed_InCSpace(ToUserString());
		// Add all wrappers from both extents so the error points at every involved cell.
		var allWrappers = new Set<LeftCellWrapper>(pNode.GetLeaves());
		allWrappers.AddRange(cNode.GetLeaves());
		var record = new ErrorLog.Record(ViewGenErrorCode.ForeignKeyNotGuaranteedInCSpace, message, allWrappers, String.Empty);
		errorLog.AddEntry(record);
	}
}
// requires: "properties" corresponds to all the properties that are
// inside cNode.Value, e.g., cNode corresponds to an extent Person,
// properties contains all the properties inside Person (recursively)
// effects: Given C-side and S-side Cell Query for a cell, generates
// the projected slots on both sides corresponding to
// properties. Also updates the C-side whereclause corresponding to
// discriminator properties on the C-side, e.g, isHighPriority
// Recurses into complex-property and association-end mappings; condition mappings
// contribute only to the where clauses (C-side or S-side, depending on which side
// the condition is declared on).
private void ExtractProperties(
	IEnumerable<StoragePropertyMapping> properties, MemberPath cNode, List<ProjectedSlot> cSlots,
	ref BoolExpression cQueryWhereClause, MemberPath sRootExtent, List<ProjectedSlot> sSlots,
	ref BoolExpression sQueryWhereClause)
{
	// For each property mapping, we add an entry to the C and S cell queries
	foreach (var propMap in properties)
	{
		// Exactly one of these 'as' casts is expected to succeed (see the assert below).
		var scalarPropMap = propMap as StorageScalarPropertyMapping;
		var complexPropMap = propMap as StorageComplexPropertyMapping;
		var associationEndPropertypMap = propMap as StorageEndPropertyMapping;
		var conditionMap = propMap as StorageConditionPropertyMapping;

		Debug.Assert(
			scalarPropMap != null ||
			complexPropMap != null ||
			associationEndPropertypMap != null ||
			conditionMap != null, "Unimplemented property mapping");

		if (scalarPropMap != null)
		{
			Debug.Assert(scalarPropMap.ColumnProperty != null, "ColumnMember for a Scalar Property can not be null");
			// Add an attribute node to node
			var cAttributeNode = new MemberPath(cNode, scalarPropMap.EdmProperty);
			// Add a column (attribute) node the sQuery
			// unlike the C side, there is no nesting. Hence we
			// did not need an internal node
			var sAttributeNode = new MemberPath(sRootExtent, scalarPropMap.ColumnProperty);
			cSlots.Add(new MemberProjectedSlot(cAttributeNode));
			sSlots.Add(new MemberProjectedSlot(sAttributeNode));
		}

		// Note: S-side constants are not allowed since they can cause
		// problems -- for example, if such a cell says 5 for the
		// third field, we cannot guarantee the fact that an
		// application may not set that field to 7 in the C-space

		// Check if the property mapping is for a complex types
		if (complexPropMap != null)
		{
			foreach (var complexTypeMap in complexPropMap.TypeMappings)
			{
				// Create a node for the complex type property and call recursively
				var complexMemberNode = new MemberPath(cNode, complexPropMap.EdmProperty);

				// Get the list of types that this type map represents
				var allTypes = new Set<EdmType>();
				// Gather a set of all explicit types for an entity
				// type mapping in allTypes.
				var exactTypes = Helpers.AsSuperTypeList<ComplexType, EdmType>(complexTypeMap.Types);
				allTypes.AddRange(exactTypes);
				// IsOfTypes entries also cover all of their subtypes.
				foreach (EdmType type in complexTypeMap.IsOfTypes)
				{
					allTypes.AddRange(
						MetadataHelper.GetTypeAndSubtypesOf(
							type, m_containerMapping.StorageMappingItemCollection.EdmItemCollection, false /*includeAbstractTypes*/));
				}

				// Restrict the C-side where clause to rows whose complex member is one of allTypes.
				var complexInTypes = BoolExpression.CreateLiteral(new TypeRestriction(complexMemberNode, allTypes), null);
				cQueryWhereClause = BoolExpression.CreateAnd(cQueryWhereClause, complexInTypes);

				// Now extract the properties of the complex type
				// (which could have other complex types)
				ExtractProperties(
					complexTypeMap.AllProperties, complexMemberNode, cSlots,
					ref cQueryWhereClause, sRootExtent, sSlots, ref sQueryWhereClause);
			}
		}

		// Check if the property mapping is for an associaion
		if (associationEndPropertypMap != null)
		{
			// create join tree node representing this relation end
			var associationEndNode = new MemberPath(cNode, associationEndPropertypMap.EndMember);
			// call recursively
			ExtractProperties(
				associationEndPropertypMap.Properties, associationEndNode, cSlots,
				ref cQueryWhereClause, sRootExtent, sSlots, ref sQueryWhereClause);
		}

		// Check if the this is a condition and add it to the Where clause
		if (conditionMap != null)
		{
			if (conditionMap.ColumnProperty != null)
			{
				// Produce a Condition Expression for the Condition Map.
				var conditionExpression = GetConditionExpression(sRootExtent, conditionMap);
				// Add the condition expression to the exisiting S side Where clause using an "And"
				sQueryWhereClause = BoolExpression.CreateAnd(sQueryWhereClause, conditionExpression);
			}
			else
			{
				Debug.Assert(conditionMap.EdmProperty != null);
				// Produce a Condition Expression for the Condition Map.
				var conditionExpression = GetConditionExpression(cNode, conditionMap);
				// Add the condition expression to the exisiting C side Where clause using an "And"
				cQueryWhereClause = BoolExpression.CreateAnd(cQueryWhereClause, conditionExpression);
			}
		}
	}
}
/// <summary>
/// Recomputes and installs the occurrence lists for the old and new wordforms after a
/// respelling change, then fires the count-changed notifications for both wordforms.
/// </summary>
/// <param name="progress">Progress dialog to advance once the old-wordform list is rebuilt.</param>
private void SetNewOccurrencesOfWordforms(ProgressDialogWorkingOn progress)
{
	// Union of all occurrences changed in any paragraph.
	Set<int> changedOccurrences = new Set<int>();
	foreach (ParaChangeInfo info in m_changedParas.Values)
		changedOccurrences.AddRange(info.Changes);

	if (AllChanged)
	{
		// Everything moved to the new wordform; no remaining occurrences.
		m_newOccurrencesOldWf = new int[0];
	}
	else
	{
		// Only some changed: the old wordform keeps the occurrences not in the changed set.
		List<int> survivors = new List<int>();
		foreach (int hvo in OldOccurrencesOfOldWordform)
		{
			// The offsets of our occurrences have almost certainly changed.
			// Update them so that the respelling dialog view will appear correct.
			var occurrence = RespellSda.OccurrenceFromHvo(hvo) as LocatedAnalysisOccurrence;
			if (occurrence != null)
				occurrence.ResetSegmentOffsets();
			if (!changedOccurrences.Contains(hvo))
				survivors.Add(hvo);
		}
		m_newOccurrencesOldWf = survivors.ToArray();
	}
	UpdateProgress(progress);

	// The new wordform gets its previous occurrences plus everything that changed.
	List<int> combined = new List<int>(m_oldOccurrencesNewWf.Length + changedOccurrences.Count);
	combined.AddRange(m_oldOccurrencesNewWf);
	combined.AddRange(changedOccurrences);
	m_newOccurrencesNewWf = combined.ToArray();

	RespellSda.ReplaceOccurrences(OldWordform, m_newOccurrencesOldWf);
	RespellSda.ReplaceOccurrences(NewWordform, m_newOccurrencesNewWf);
	SendCountVirtualPropChanged(NewWordform);
	SendCountVirtualPropChanged(OldWordform);
}
/// <summary>
/// Reloads text tags for every bundle touched by the edit, refreshes the affected
/// bundles, and restores the original selection afterwards.
/// </summary>
private void ResyncTagsForAffectedBundles()
{
	// Only load tags on xfics that are still valid objects.
	List<int> survivingXfics = new List<int>();
	foreach (int xfic in m_affectedXfics)
	{
		if (m_interlinDoc.Cache.IsValidObject(xfic))
			survivingXfics.Add(xfic);
	}

	// Union the survivors with everything tagging might have touched,
	// so tag loading covers both the new and the old sets.
	Set<int> combinedXfics = new Set<int>(survivingXfics);
	combinedXfics.AddRange(new List<int>(m_interlinDoc.GetAllXficsPossiblyAffectedByTagging(survivingXfics)));

	List<int> xficsToLoad = new List<int>(combinedXfics);
	(m_interlinDoc.m_vc as InterlinTaggingVc).LoadTextTagsForXfics(xficsToLoad);
	m_interlinDoc.AllowLayout = true;
	m_interlinDoc.UpdateAffectedBundles(new Set<int>(xficsToLoad));

	if (m_originalSelection != null)
		m_originalSelection.RestoreSelectionAndScrollPos();
}
/// <summary>
/// Checks for overlapping tags.
/// </summary>
/// <param name="hvoWfics">The hvo wfics.</param>
/// <param name="hvosToDelete">The hvos to delete; any tag already referencing one of the wfics is added here.</param>
/// <returns>A collection of wfic hvos that are affected by this change.</returns>
private Set<int> CheckForOverlappingTags(List<int> hvoWfics, ref Set<int> hvosToDelete)
{
	// Any existing tag that AppliesTo one of these wfics overlaps the new tag and
	// is scheduled for deletion.
	List<int> overlappingTags = FindAllTagsReferencingXficList(hvoWfics);
	if (overlappingTags.Count > 0)
		hvosToDelete.AddRange(overlappingTags);
	return GetAllAffectedWficsFromTags(overlappingTags);
}
/// <summary>
/// re-creates the list of project items and the list of available item types
/// </summary>
internal void CreateItemsListFromMSBuild()
{
	WorkbenchSingleton.AssertMainThread();
	lock (SyncRoot)
	{
		// Throw away the old item wrappers before rebuilding from MSBuild.
		foreach (ProjectItem oldItem in items)
		{
			oldItem.Dispose();
		}
		items.Clear();
		itemsReadOnly = null; // remove readonly variant of item list - will regenerate on next Items call

		// Rebuild the set of item types usable for files: the defaults plus
		// every AvailableItemName the project declares.
		Set<ItemType> fileItemTypes = new Set<ItemType>();
		fileItemTypes.AddRange(ItemType.DefaultFileItems);
		foreach (MSBuild.BuildItem buildItem in project.GetEvaluatedItemsByName("AvailableItemName"))
		{
			fileItemTypes.Add(new ItemType(buildItem.Include));
		}
		this.availableFileItemTypes = fileItemTypes.AsReadOnly();

		// Re-wrap every evaluated item that belongs to this project (not imported ones).
		foreach (MSBuild.BuildItem buildItem in project.EvaluatedItems)
		{
			if (buildItem.IsImported)
				continue;
			items.Add(CreateProjectItem(buildItem));
		}
	}
	ClearFindFileCache();
}
/// <summary>
/// Given a set of wfics, return the set of xfics that may be affected
/// by tagging or undoing the applied tag (which may overlap other xfics).
/// </summary>
/// <param name="hvoWfics">Hvos of the wfics being (un)tagged.</param>
/// <returns>All xfics belonging to the segments that own the given wfics.</returns>
private Set<int> GetAllXficsPossiblyAffectedByTagging(List<int> hvoWfics)
{
	// This is overkill, but there are too many cases to handle during undo/redo
	// to cover with just CheckForOverlappingTags(). For now, collect every xfic
	// of every segment owning the given wfics so the display updates properly.
	Set<int> owningSegments = new Set<int>();
	foreach (int hvoWfic in hvoWfics)
	{
		// First collect the parent segment of each wfic.
		StTxtPara.TwficInfo info = new StTxtPara.TwficInfo(Cache, hvoWfic);
		if (info.SegmentHvo != 0)
			owningSegments.Add(info.SegmentHvo);
	}
	// Now expand the segments into the full set of xfics they contain.
	Set<int> affectedXfics = new Set<int>();
	affectedXfics.AddRange((m_vc as InterlinTaggingVc).CollectXficsFromSegments(owningSegments.ToArray()));
	return affectedXfics;
}
// requires: constraint.ChildColumns form a key in
// constraint.ChildTable (actually they should subsume the primary key)
// effects: Verifies that the foreign key constraint is guaranteed by the C-space views:
// every row of the child extent's view must also be contained in the parent extent's
// view. If not, records a ForeignKeyNotGuaranteedInCSpace error (listing the cell
// wrappers of both extents) in errorLog.
// Note: config is unused here but kept for signature parity with callers.
private void GuaranteeForeignKeyConstraintInCSpace(QueryRewriter childRewriter, QueryRewriter parentRewriter,
												   ErrorLog errorLog, ConfigViewGenerator config)
{
	ViewgenContext childContext = childRewriter.ViewgenContext;
	ViewgenContext parentContext = parentRewriter.ViewgenContext;
	CellTreeNode cNode = childRewriter.BasicView;
	CellTreeNode pNode = parentRewriter.BasicView;

	// Merge both sides' fragment query processors so containment can be checked across them.
	FragmentQueryProcessor qp = FragmentQueryProcessor.Merge(childContext.RightFragmentQP, parentContext.RightFragmentQP);
	bool cImpliesP = qp.IsContainedIn(cNode.RightFragmentQuery, pNode.RightFragmentQuery);

	if (false == cImpliesP)
	{
		// Foreign key constraint not being ensured in C-space.
		// (Removed two dead locals that formatted extent lists but were never used.)
		string message = System.Data.Entity.Strings.ViewGen_Foreign_Key_Not_Guaranteed_InCSpace(ToUserString());
		// Add all wrappers from both extents so the error points at every involved cell.
		Set<LeftCellWrapper> allWrappers = new Set<LeftCellWrapper>(pNode.GetLeaves());
		allWrappers.AddRange(cNode.GetLeaves());
		ErrorLog.Record record = new ErrorLog.Record(true, ViewGenErrorCode.ForeignKeyNotGuaranteedInCSpace, message, allWrappers, String.Empty);
		errorLog.AddEntry(record);
	}
}
/// <summary>
/// Caches tag strings for the given xfics: every xfic covered by a referencing text tag
/// gets that tag's string cached; the remaining xfics get an empty tag string cached.
/// </summary>
/// <param name="xfics">Hvos of the xfics to load tag strings for.</param>
internal void LoadTextTagsForXfics(List<int> xfics)
{
	List<int> referencingTags = InterlinTaggingChild.GetTaggingReferencingTheseAnnotations(Cache, xfics, m_textTagAnnDefn);
	// Cache the tag string for each tag; collect the xfics each one covers.
	// Preload doesn't need PropChanged() to fire. This version doesn't.
	Set<int> taggedXfics = new Set<int>();
	foreach (int hvoTag in referencingTags)
		taggedXfics.AddRange(CacheTagString(hvoTag));
	// Now make sure the xfics that didn't get a tag cached have empty strings cached.
	Set<int> untaggedXfics = taggedXfics.SymmetricDifference(xfics);
	CacheNullTagString(untaggedXfics.ToArray(), false);
}
// Builds the C-space (model) metadata — entity container, entity sets, association sets,
// function imports — from the store entity container, recording any problems in the
// returned error list. On success the results are stashed in _modelEntityContainer,
// _mappingLookups, and _edmItemCollection.
[ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)] //For EdmItemCollection constructor call.
//Since we pass in empty collection for paths, we do not have any resource exposure here.
private IList<EdmSchemaError> InternalGenerateMetadata()
{
	// Drop results of any previous run before regenerating.
	if (_modelEntityContainer != null)
	{
		_modelEntityContainer = null;
		_mappingLookups = null;
		_edmItemCollection = null;
	}

	LoadMethodSessionState session = new LoadMethodSessionState();
	try
	{
		session.EdmItemCollection = new EdmItemCollection();
		// Foreign key properties require EF v2+; bail out early with an error otherwise.
		if (this.GenerateForeignKeyProperties && this._targetEntityFrameworkVersion < EntityFrameworkVersions.Version2)
		{
			session.AddError(Strings.UnableToGenerateForeignKeyPropertiesForV1, ModelBuilderErrorCode.UnableToGenerateForeignKeyPropertiesForV1, EdmSchemaErrorSeverity.Error, null);
			return session.Errors;
		}

		List<AssociationSet> storeAssociationSets = new List<AssociationSet>();
		CollectAllFkProperties(session);
		EntityContainer modelEntityContainer = new EntityContainer(_modelEntityContainerName, DataSpace.CSpace);

		// create the EntityTypes and EntitySets, and save up the AssociationSets for later.
		foreach (EntitySetBase storeSet in _storeEntityContainer.BaseEntitySets)
		{
			switch (storeSet.BuiltInTypeKind)
			{
				case BuiltInTypeKind.AssociationSet:
					// save these, and create them after the EntityTypes and EntitySets have been created
					string errorMessage;
					if (this.GenerateForeignKeyProperties || !EntityStoreSchemaGenerator.IsFkPartiallyContainedInPK(((AssociationSet)storeSet).ElementType, out errorMessage))
					{
						storeAssociationSets.Add((AssociationSet)storeSet);
					}
					else
					{
						session.AddError(errorMessage, ModelBuilderErrorCode.UnsupportedForeinKeyPattern, EdmSchemaErrorSeverity.Error, null);
					}
					break;
				case BuiltInTypeKind.EntitySet:
					// Every store entity set starts out as a candidate for being collapsed
					// into a many-to-many association.
					EntitySet set = (EntitySet)storeSet;
					session.CandidateCollapsedAssociations.Add(set, new OneToOneMappingSerializer.CollapsedEntityAssociationSet(set));
					break;
				default:
					// error
					throw EDesignUtil.MissingGenerationPatternForType(storeSet.BuiltInTypeKind);
			}
		}

		foreach (AssociationSet storeAssociationSet in storeAssociationSets)
		{
			SaveAssociationForCollapsedAssociationCandidate(session, storeAssociationSet);
		}

		Set<AssociationSet> associationSetsFromCollapseCandidateRejects = new Set<AssociationSet>();
		IEnumerable<OneToOneMappingSerializer.CollapsedEntityAssociationSet> invalidCandidates = FindAllInvalidCollapsedAssociationCandidates(session);

		// now that we have gone through all of the association sets,
		// demote the candidates that turned out not to be collapsible.
		foreach (OneToOneMappingSerializer.CollapsedEntityAssociationSet collapsed in invalidCandidates)
		{
			session.CandidateCollapsedAssociations.Remove(collapsed.EntitySet);
			// just create the entity set and save the association set to be added later
			EntitySet entitySet = CreateModelEntitySet(session, collapsed.EntitySet);
			modelEntityContainer.AddEntitySetBase(entitySet);
			associationSetsFromCollapseCandidateRejects.AddRange(collapsed.AssociationSets);
		}

		// create all the associations for the invalid collapsed entity association candidates
		foreach (AssociationSet storeAssociationSet in (IEnumerable<AssociationSet>)associationSetsFromCollapseCandidateRejects)
		{
			if (!IsAssociationPartOfCandidateCollapsedAssociation(session, storeAssociationSet))
			{
				AssociationSet set = CreateModelAssociationSet(session, storeAssociationSet);
				modelEntityContainer.AddEntitySetBase(set);
			}
		}

		// save the set that needs to be created and mapped
		session.MappingLookups.CollapsedEntityAssociationSets.AddRange(session.CandidateCollapsedAssociations.Values);

		// do this in a seperate loop so we are sure all the necessary EntitySets have been created
		foreach (OneToOneMappingSerializer.CollapsedEntityAssociationSet collapsed in session.MappingLookups.CollapsedEntityAssociationSets)
		{
			AssociationSet set = CreateModelAssociationSet(session, collapsed);
			modelEntityContainer.AddEntitySetBase(set);
		}

		if (this._targetEntityFrameworkVersion >= EntityFrameworkVersions.Version2)
		{
			Debug.Assert(EntityFrameworkVersions.Latest == EntityFrameworkVersions.Version3, "Did you add a new framework version");
			// add LazyLoadingEnabled=true to the EntityContainer
			MetadataProperty lazyLoadingAttribute =
				new MetadataProperty(
					DesignXmlConstants.EdmAnnotationNamespace + ":" + DesignXmlConstants.LazyLoadingEnabled,
					TypeUsage.CreateStringTypeUsage(
						PrimitiveType.GetEdmPrimitiveType(PrimitiveTypeKind.String),
						false,
						false),
					true);
			modelEntityContainer.AddMetadataProperties(new List<MetadataProperty>() { lazyLoadingAttribute });
			this._hasAnnotationNamespace = true;
		}

		// Map store functions to function imports.
		MapFunctions(session, modelEntityContainer);

		// Only publish the generated metadata when nothing produced an error-severity problem.
		if (!EntityStoreSchemaGenerator.HasErrorSeverityErrors(session.Errors))
		{
			// add them to the collection so they will work if someone wants to use the collection
			foreach (EntityType type in session.MappingLookups.StoreEntityTypeToModelEntityType.Values)
			{
				type.SetReadOnly();
				session.EdmItemCollection.AddInternal(type);
			}
			foreach (AssociationType type in session.MappingLookups.StoreAssociationTypeToModelAssociationType.Values)
			{
				type.SetReadOnly();
				session.EdmItemCollection.AddInternal(type);
			}
			foreach (OneToOneMappingSerializer.CollapsedEntityAssociationSet set in session.MappingLookups.CollapsedEntityAssociationSets)
			{
				set.ModelAssociationSet.ElementType.SetReadOnly();
				session.EdmItemCollection.AddInternal(set.ModelAssociationSet.ElementType);
			}
			modelEntityContainer.SetReadOnly();
			session.EdmItemCollection.AddInternal(modelEntityContainer);

			_modelEntityContainer = modelEntityContainer;
			_mappingLookups = session.MappingLookups;
			_edmItemCollection = session.EdmItemCollection;
		}
	}
	catch (Exception e)
	{
		if (MetadataUtil.IsCatchableExceptionType(e))
		{
			// an exception in the code is definitely an error
			string message = EDesignUtil.GetMessagesFromEntireExceptionChain(e);
			session.AddError(message, ModelBuilderErrorCode.UnknownError, EdmSchemaErrorSeverity.Error, e);
		}
		else
		{
			// Fatal exception types (stack overflow, etc.) are rethrown untouched.
			throw;
		}
	}
	return session.Errors;
}
/// <summary>
/// Gets all synsets for a word, optionally restricting the returned synsets to one or more parts of speech. This
/// method does not perform any morphological analysis to match up the given word. It does, however, replace all
/// spaces with underscores and call String.ToLower to normalize case.
/// </summary>
/// <param name="word">Word to get SynSets for. This method will replace all spaces with underscores and
/// call ToLower() to normalize the word's case.</param>
/// <param name="posRestriction">POSs to search. Cannot contain POS.None. Will search all POSs if no restriction
/// is given.</param>
/// <returns>Set of SynSets that contain word</returns>
/// <exception cref="Exception">Thrown when the restriction contains POS.None.</exception>
public Set<SynSet> GetSynSets(string word, params POS[] posRestriction)
{
	// use all POSs if none are supplied
	if (posRestriction == null || posRestriction.Length == 0)
		posRestriction = new POS[] { POS.Adjective, POS.Adverb, POS.Noun, POS.Verb };

	Set<POS> posSet = new Set<POS>(posRestriction);
	if (posSet.Contains(POS.None))
		throw new Exception("Invalid SynSet POS request: " + POS.None);

	// all words are lower case and space-replaced
	word = word.ToLower().Replace(' ', '_');

	// gather synsets for each POS
	Set<SynSet> allSynsets = new Set<SynSet>();
	foreach (POS pos in posSet)
		if (_inMemory)
		{
			// read instantiated synsets from memory
			Set<SynSet> synsets;
			if (_posWordSynSets[pos].TryGetValue(word, out synsets))
				// optimization:  if there are no more parts of speech to check, we have all the synsets - so set the
				// return collection and make it read-only. this is faster than calling AddRange on a set.
				// NOTE: this returns the internal stored set (aliased, marked read-only), not a copy.
				if (posSet.Count == 1)
				{
					allSynsets = synsets;
					allSynsets.IsReadOnly = true;
				}
				else
					allSynsets.AddRange(synsets);
		}
		else
		{
			// get index line for word
			string indexLine = _posIndexWordSearchStream[pos].Search(word);

			// if index line exists, get synset shells and instantiate them
			if (indexLine != null)
			{
				// get synset shells and instantiate them
				SynSet mostCommonSynset;
				Set<SynSet> synsets = GetSynSetShells(indexLine, pos, out mostCommonSynset, this);
				foreach (SynSet synset in synsets)
				{
					synset.Instantiate();
					allSynsets.Add(synset);
				}

				// we only need to set this flag if there is more than one synset for the word-pos pair
				if (synsets.Count > 1)
					mostCommonSynset.SetAsMostCommonSynsetFor(word);
			}
		}

	return allSynsets;
}
// Imports a single FB2 document: applies the optional filter, checks for duplicates
// (by document id, then by similar authors), then adds or updates the book in the
// database. Updates documentEntry's Status/BookId/ErrorText and the process legend
// counters as side effects, and returns the resulting import status.
private ImportStatus ProcessDocument(Fb2DocumentEntry documentEntry, Stream stream, XmlDocument document, Encoding encoding)
{
	ImportStatus importResult;
	FictionBook fictionBook;
	try
	{
		importResult = documentEntry.Status;
		fictionBook = new FictionBook(document, encoding);

		// Books rejected by the configured filter are counted and skipped.
		if (filter != null && !filter.Fit(new FilterAdapter(fictionBook)))
		{
			documentEntry.Status = ImportStatus.FilteredOut;
			this.processLegend.IncrementCounter(documentEntry.Status);
			return documentEntry.Status;
		}

		BookAction action = BookAction.None;
		if (!dontCheckForDuplicate)
		{
			// Collect ids of authors "similar" to this book's authors (0 stands in for a missing id).
			Set<int> simularAuthors = new Set<int>();
			foreach (AuthorInfoNode authorInfoNode in fictionBook.TitleInfo.Authors)
			{
				List<Author> list = this.database.FindSimularAuthors(authorInfoNode);
				simularAuthors.AddRange(list.ConvertAll(delegate(Author author) { return author.Id ?? 0; }));
			}

			// First duplicate check: books sharing the same document id.
			List<BookInfo> books = this.database.LoadBookInfoByDocumentId(fictionBook.DocumentInfo.Id);
			action = CheckForDuplicate(fictionBook, documentEntry, books);
			if (action == BookAction.Add)
			{
				// Second duplicate check: books by similar authors.
				List<BookInfo> simularBooks = this.database.LoadBookInfoByAuthorIdList(simularAuthors);
				action = CheckForDuplicate(fictionBook, documentEntry, simularBooks);
			}
		}
		else
		{
			action = BookAction.Add;
		}

		// Rewind so the database sees the document from the beginning.
		stream.Seek(0, SeekOrigin.Begin);

		BookInfo bookInfo;
		switch (action)
		{
			case BookAction.Add:
				importResult = ImportStatus.Added;
				bookInfo = this.database.CreateFictionBook(fictionBook, stream, documentEntry);
				documentEntry.BookId = bookInfo.BookId;
				documentEntry.Status = ImportStatus.Added;
				break;
			case BookAction.Update:
				importResult = ImportStatus.Updated;
				bookInfo = this.database.LoadBookInfoByBookId(documentEntry.BookId ?? -1);
				this.database.UpdateFictionBook(bookInfo, fictionBook, stream, documentEntry);
				documentEntry.BookId = bookInfo.BookId;
				documentEntry.Status = ImportStatus.Updated;
				break;
			case BookAction.None:
				importResult = documentEntry.Status;
				break;
		}

		// Count the outcome in the legend for the statuses that are tracked there.
		switch (documentEntry.Status)
		{
			case ImportStatus.Added:
			case ImportStatus.Updated:
			case ImportStatus.Duplicate:
			case ImportStatus.DuplicateIDsDiffer:
			case ImportStatus.DuplicateNewer:
			case ImportStatus.DuplicateOlder:
				this.processLegend.IncrementCounter(documentEntry.Status);
				break;
		}
	}
	catch (DatabaseException exp)
	{
		// Database failures are recorded on the entry, not rethrown.
		documentEntry.ErrorText = exp.Message;
		this.processLegend.IncrementCounter(ImportStatus.DatabaseError);
		importResult = ImportStatus.DatabaseError;
	}
	catch (InvalidFictionBookFormatException exp)
	{
		// Malformed documents are likewise recorded as parsing errors.
		documentEntry.ErrorText = exp.Message;
		this.processLegend.IncrementCounter(ImportStatus.ParsingError);
		importResult = ImportStatus.ParsingError;
	}
	return importResult;
}
// effects: Ensures that there is a relationship mapped into the C-space for some cell in m_cellGroup. Else
// adds an error to errorLog.
// Walks every cell whose S-side extent is the child table, looking for one where the
// foreign key columns are mapped to a relationship end; validates the constraint for
// the first suitable cell found, or records the appropriate error(s).
private void GuaranteeMappedRelationshipForForeignKey(
	QueryRewriter childRewriter, QueryRewriter parentRewriter,
	IEnumerable<Cell> cells, ErrorLog errorLog, ConfigViewGenerator config)
{
	var childContext = childRewriter.ViewgenContext;
	var parentContext = parentRewriter.ViewgenContext;

	// Find a cell where this foreign key is mapped as a relationship.
	var prefix = new MemberPath(ChildTable);
	var primaryKey = ExtentKey.GetPrimaryKeyForEntityType(prefix, ChildTable.ElementType);
	var primaryKeyFields = primaryKey.KeyFields;
	var foundCell = false;

	// We need to find only one valid case; don't error on any single check being false.
	var foundValidParentColumnsForForeignKey = false;
	List<ErrorLog.Record> errorListForInvalidParentColumnsForForeignKey = null;
	foreach (var cell in cells)
	{
		if (cell.SQuery.Extent.Equals(ChildTable) == false)
		{
			continue;
		}

		// The childtable is mapped to a relationship in the C-space in cell.
		// Check that all the columns of the foreign key and the primary key in the child table
		// are mapped to some property in the C-space.
		var parentEnd = GetRelationEndForColumns(cell, ChildColumns);
		if (parentEnd != null
			&& CheckParentColumnsForForeignKey(cell, cells, parentEnd, ref errorListForInvalidParentColumnsForForeignKey) == false)
		{
			// Not an error unless we find no valid case at all.
			continue;
		}
		else
		{
			foundValidParentColumnsForForeignKey = true;
		}

		var childEnd = GetRelationEndForColumns(cell, primaryKeyFields);
		Debug.Assert(
			childEnd == null || parentEnd != childEnd,
			"Ends are same => PKey and child columns are same - code should gone to other method");
		// Note: If both of them are not-null, they are mapped to the
		// same association set -- since we checked that particular cell
		if (childEnd != null && parentEnd != null
			&& FindEntitySetForColumnsMappedToEntityKeys(cells, primaryKeyFields) != null)
		{
			foundCell = true;
			CheckConstraintWhenParentChildMapped(cell, errorLog, parentEnd, config);
			break; // Done processing for the foreign key - either it was mapped correctly or it was not
		}
		else if (parentEnd != null)
		{
			// At this point, we know cell corresponds to an association set.
			// (Removed an unused 'parentSet' local that was computed here but never read.)
			var assocSet = (AssociationSet)cell.CQuery.Extent;
			foundCell = CheckConstraintWhenOnlyParentMapped(assocSet, parentEnd, childRewriter, parentRewriter);
			if (foundCell)
			{
				break;
			}
		}
	}

	// CheckParentColumnsForForeignKey has returned no matches: error.
	if (!foundValidParentColumnsForForeignKey)
	{
		Debug.Assert(
			errorListForInvalidParentColumnsForForeignKey != null && errorListForInvalidParentColumnsForForeignKey.Count > 0);
		foreach (var errorRecord in errorListForInvalidParentColumnsForForeignKey)
		{
			errorLog.AddEntry(errorRecord);
		}
		return;
	}

	if (foundCell == false)
	{
		// No cell found -- declare error, listing the wrappers of both extents.
		var message = Strings.ViewGen_Foreign_Key_Missing_Relationship_Mapping(ToUserString());
		IEnumerable<LeftCellWrapper> parentWrappers = GetWrappersFromContext(parentContext, ParentTable);
		IEnumerable<LeftCellWrapper> childWrappers = GetWrappersFromContext(childContext, ChildTable);
		var bothExtentWrappers = new Set<LeftCellWrapper>(parentWrappers);
		bothExtentWrappers.AddRange(childWrappers);
		var record = new ErrorLog.Record(
			ViewGenErrorCode.ForeignKeyMissingRelationshipMapping, message, bothExtentWrappers, String.Empty);
		errorLog.AddEntry(record);
	}
}
/// <summary>
/// A simple constructor that initializes the object with its dependencies.
/// </summary>
/// <param name="p_gmiGameModeInfo">The environment info of the current game mode.</param>
/// <param name="p_modMod">The mod being installed.</param>
/// <param name="p_ilgInstallLog">The install log to use to log file installations.</param>
/// <param name="p_tfmFileManager">The transactional file manager to use to interact with the file system.</param>
/// <param name="p_futFileUtility">The file utility class.</param>
/// <param name="p_dlgOverwriteConfirmationDelegate">The method to call in order to confirm an overwrite.</param>
public GamebryoGameSpecificValueUpgradeInstaller(IMod p_modMod, IGameModeEnvironmentInfo p_gmiGameModeInfo, IInstallLog p_ilgInstallLog, TxFileManager p_tfmFileManager, FileUtil p_futFileUtility, ConfirmItemOverwriteDelegate p_dlgOverwriteConfirmationDelegate)
	: base(p_modMod, p_gmiGameModeInfo, p_ilgInstallLog, p_tfmFileManager, p_futFileUtility, p_dlgOverwriteConfirmationDelegate)
{
	// Snapshot the game-specific value edits already recorded for this mod —
	// presumably so the upgrade can distinguish pre-existing edits from new ones
	// (TODO confirm against the consumers of OriginallyInstalledEdits).
	OriginallyInstalledEdits = new Set<string>();
	OriginallyInstalledEdits.AddRange(InstallLog.GetInstalledGameSpecificValueEdits(Mod));
}
/// <summary>
/// Greedily chooses DTE (pair-encoding) substitutions for this file until its encoded
/// size fits within the layout, reusing <paramref name="currentPairs"/> first and then
/// assigning new bytes from <paramref name="dteBytes"/> to the most frequent pairs.
/// </summary>
/// <param name="replacements">Substrings to consider when counting pair/triple frequencies.</param>
/// <param name="currentPairs">Pairs already assigned for other files; tried first.</param>
/// <param name="dteBytes">Pool of byte values still available for new pairs.</param>
/// <returns>The chosen pairs (possibly empty when no DTE is needed), or null when the
/// byte pool runs out before the file fits.</returns>
public Set<KeyValuePair<string, byte>> GetPreferredDTEPairs( Set<string> replacements, Set<KeyValuePair<string, byte>> currentPairs, Stack<byte> dteBytes )
{
	// Clone the sections so repeated encoding attempts don't mutate the real data.
	var secs = GetCopyOfSections();
	IList<byte> bytes = GetSectionByteArrays( secs, CharMap, CompressionAllowed ).Join();

	Set<KeyValuePair<string, byte>> result = new Set<KeyValuePair<string, byte>>();

	// Determine if we even need to do DTE at all
	int bytesNeeded = bytes.Count - ( Layout.Size - DataStart );
	if ( bytesNeeded <= 0 )
	{
		return result;
	}

	// Rebuilds the concatenated text of all DTE-allowed sections (entries separated by
	// the {0xFE} pseudo-terminator) so pair/triple frequencies can be counted.
	// Factored into a local delegate because the original duplicated this code twice.
	Func<string> buildDteSource = delegate
	{
		StringBuilder sb = new StringBuilder( Layout.Size );
		for ( int i = 0; i < secs.Count; i++ )
		{
			if ( DteAllowed[i] )
			{
				secs[i].ForEach( t => sb.Append( t ).Append( "{0xFE}" ) );
			}
		}
		return sb.ToString();
	};

	// Take the pairs that were already used for other files and encode this file with them
	result.AddRange( currentPairs );
	TextUtilities.DoDTEEncoding( secs, DteAllowed, PatcherLib.Utilities.Utilities.DictionaryFromKVPs( result ) );
	bytes = GetSectionByteArrays( secs, CharMap, CompressionAllowed ).Join();

	// If enough bytes were saved with the existing pairs, no need to look further
	bytesNeeded = bytes.Count - ( Layout.Size - DataStart );
	if ( bytesNeeded <= 0 )
	{
		return result;
	}

	// Otherwise, determine pair frequency over all the strings that can be DTE encoded
	var dict = TextUtilities.GetPairAndTripleCounts( buildDteSource(), replacements );

	// Sort the list by count, most frequent first
	var l = new List<KeyValuePair<string, int>>( dict );
	l.Sort( ( a, b ) => b.Value.CompareTo( a.Value ) );

	// Go through each one, encode the file with it, and see if we're below the limit
	while ( bytesNeeded > 0 && l.Count > 0 && dteBytes.Count > 0 )
	{
		result.Add( new KeyValuePair<string, byte>( l[0].Key, dteBytes.Pop() ) );
		TextUtilities.DoDTEEncoding( secs, DteAllowed, PatcherLib.Utilities.Utilities.DictionaryFromKVPs( result ) );
		bytes = GetSectionByteArrays( secs, CharMap, CompressionAllowed ).Join();
		bytesNeeded = bytes.Count - ( Layout.Size - DataStart );
		if ( bytesNeeded > 0 )
		{
			// Still too big: re-count frequencies on the partially-encoded text and re-sort.
			l = new List<KeyValuePair<string, int>>( TextUtilities.GetPairAndTripleCounts( buildDteSource(), replacements ) );
			l.Sort( ( a, b ) => b.Value.CompareTo( a.Value ) );
		}
	}

	// Ran out of available pairs and still don't have enough space --> error
	if ( bytesNeeded > 0 )
	{
		return null;
	}

	return result;
}
/// <summary>
/// Greedily selects DTE (dual-tile encoding) pairs so that this file's encoded text fits
/// within its layout size, reporting progress to an optional background worker.
/// Pairs already chosen for other files (<paramref name="currentPairs"/>) are reused first;
/// additional pairs are then picked by descending frequency until the file fits or
/// resources run out.
/// </summary>
/// <param name="replacements">Strings eligible for replacement when counting pair/triple frequencies.</param>
/// <param name="currentPairs">Pairs already allocated for other files; included in the result.</param>
/// <param name="dteBytes">Pool of unused byte values available for new pairs; popped as pairs are assigned.</param>
/// <param name="worker">Optional worker used to report remaining-bytes progress; may be null.</param>
/// <returns>
/// The full pair set (including <paramref name="currentPairs"/>) needed to make the file fit;
/// an empty set if no DTE is needed; or null if the file still does not fit after exhausting
/// all candidate pairs/bytes.
/// </returns>
public virtual Set<KeyValuePair<string, byte>> GetPreferredDTEPairs( Set<string> replacements, Set<KeyValuePair<string, byte>> currentPairs, Stack<byte> dteBytes, System.ComponentModel.BackgroundWorker worker )
{
    // Clone the sections so encoding experiments don't mutate the real file contents
    var secs = GetCopyOfSections();
    IList<byte> bytes = GetSectionByteArrays( secs, SelectedTerminator, CharMap, CompressionAllowed ).Join();

    Set<KeyValuePair<string, byte>> result = new Set<KeyValuePair<string, byte>>();

    // Determine if we even need to do DTE at all
    int bytesNeeded = bytes.Count - (Layout.Size - DataStart);
    if (bytesNeeded <= 0)
    {
        return result;
    }

    // Take the pairs that were already used for other files and encode this file with them
    result.AddRange( currentPairs );
    TextUtilities.DoDTEEncoding( secs, DteAllowed, PatcherLib.Utilities.Utilities.DictionaryFromKVPs( result ) );
    bytes = GetSectionByteArrays( secs, SelectedTerminator, CharMap, CompressionAllowed ).Join();

    // If enough bytes were saved with the existing pairs, no need to look further
    bytesNeeded = bytes.Count - (Layout.Size - DataStart);
    if (bytesNeeded <= 0)
    {
        return result;
    }

    // Marker appended between entries when counting pair frequencies, e.g. "{0xFE}"
    string terminatorString = string.Format( "{{0x{0:X2}", selectedTerminator ) + "}";

    // Otherwise, get all the strings that can be DTE encoded
    StringBuilder sb = new StringBuilder( Layout.Size );
    for (int i = 0; i < secs.Count; i++)
    {
        if (DteAllowed[i])
        {
            secs[i].ForEach( t => sb.Append( t ).Append( terminatorString ) );
        }
    }

    // ... determine pair frequency
    var dict = TextUtilities.GetPairAndTripleCounts( sb.ToString(), replacements );

    // Sort the list by count, most frequent first
    var l = new List<KeyValuePair<string, int>>( dict );
    l.Sort( ( a, b ) => b.Value.CompareTo( a.Value ) );

    // Go through each one, encode the file with it, and see if we're below the limit
    while (bytesNeeded > 0 && l.Count > 0 && dteBytes.Count > 0)
    {
        // Assign the next free DTE byte to the most frequent remaining pair
        result.Add( new KeyValuePair<string, byte>( l[0].Key, dteBytes.Pop() ) );
        TextUtilities.DoDTEEncoding( secs, DteAllowed, PatcherLib.Utilities.Utilities.DictionaryFromKVPs( result ) );
        bytes = GetSectionByteArrays( secs, SelectedTerminator, CharMap, CompressionAllowed ).Join();
        bytesNeeded = bytes.Count - (Layout.Size - DataStart);
        if (bytesNeeded > 0)
        {
            if (worker != null)
                worker.ReportProgress(0,
                    new ProgressForm.FileProgress { File = this, State = ProgressForm.TaskState.Starting, Task = ProgressForm.Task.CalculateDte, BytesLeft = bytesNeeded } );

            // Still too big: recount pair frequencies on the encoded text and re-sort
            // so the next iteration picks the best remaining candidate
            StringBuilder sb2 = new StringBuilder( Layout.Size );
            for (int i = 0; i < secs.Count; i++)
            {
                if (DteAllowed[i])
                {
                    secs[i].ForEach( t => sb2.Append( t ).Append( terminatorString ) );
                }
            }
            l = new List<KeyValuePair<string, int>>( TextUtilities.GetPairAndTripleCounts( sb2.ToString(), replacements ) );
            l.Sort( ( a, b ) => b.Value.CompareTo( a.Value ) );

            // Start from fresh clones so the next pass re-encodes with the full pair set
            secs = GetCopyOfSections();
        }
    }

    // Ran out of available pairs and still don't have enough space --> error
    if (bytesNeeded > 0)
    {
        return null;
    }

    return result;
}
/// <summary>
/// Gets the list of Game Specific Value edited keys that were installed by the given mod.
/// </summary>
/// <param name="p_modInstaller">The mod whose installed edits are to be returned.</param>
/// <returns>The list of edited keys that was installed by the given mod.</returns>
public IList<string> GetInstalledGameSpecificValueEdits(IMod p_modInstaller)
{
    Set<string> setEdits = new Set<string>();
    string strKey = GetModKey(p_modInstaller);

    // Unknown or already-removed mods contribute no edits
    if (String.IsNullOrEmpty(strKey) || m_setRemovedModKeys.Contains(strKey))
        return setEdits;

    // Edits recorded by this log and by the enlisted log, minus anything uninstalled
    setEdits.AddRange(m_dicInstalledGameSpecificValueEdits
        .Where(itm => itm.Installers.Contains(strKey))
        .Select(itm => itm.Item));
    setEdits.AddRange(EnlistedInstallLog.m_dicInstalledGameSpecificValueEdits
        .Where(itm => itm.Installers.Contains(strKey))
        .Select(itm => itm.Item));
    setEdits.RemoveRange(m_dicUninstalledGameSpecificValueEdits
        .Where(itm => itm.Installers.Contains(strKey))
        .Select(itm => itm.Item));

    return setEdits;
}
// effects: Given the metadata information for a container in
// containerMap, generate the cells for it and modify cells to
// contain the newly-generated cells
private void ExtractCells(List<Cell> cells)
{
    // extract entity mappings, i.e., for CPerson1, COrder1, etc
    foreach (var setMapping in m_containerMapping.AllSetMaps)
    {
        // Get each type map in an entity set mapping, i.e., for
        // CPerson, CCustomer, etc in CPerson1
        foreach (var typeMapping in setMapping.TypeMappings)
        {
            var entityMapping = typeMapping as StorageEntityTypeMapping;
            Debug.Assert(
                entityMapping != null || typeMapping is StorageAssociationTypeMapping, "Invalid typemap");

            // Collect every type covered by this type mapping. Note that we
            // do not have subtyping in association sets, so only entity
            // mappings contribute IsOfType expansions.
            var mappedTypes = new Set<EdmType>();
            if (entityMapping != null)
            {
                mappedTypes.AddRange(entityMapping.Types);
                foreach (var isOfType in entityMapping.IsOfTypes)
                {
                    // IsOfType entries cover the type and all of its subtypes
                    mappedTypes.AddRange(
                        MetadataHelper.GetTypeAndSubtypesOf(
                            isOfType,
                            m_containerMapping.StorageMappingItemCollection.EdmItemCollection,
                            false /*includeAbstractTypes*/));
                }
            }

            var extent = setMapping.Set;
            Debug.Assert(
                extent != null, "Extent map for a null extent or type of extentMap.Exent " + "is not Extent");

            // For each table mapping for the type mapping, we create cells
            foreach (var fragment in typeMapping.MappingFragments)
            {
                ExtractCellsFromTableFragment(extent, fragment, mappedTypes, cells);
            }
        }
    }
}