/// <summary>
/// A MultiPoint is simple if it has no repeated points.
/// </summary>
public bool IsSimple(IMultiPoint mp)
{
    if (mp.IsEmpty)
        return true;
    Set<ICoordinate> points = new Set<ICoordinate>();
    for (int i = 0; i < mp.NumGeometries; i++)
    {
        IPoint pt = (IPoint) mp.GetGeometryN(i);
        ICoordinate p = pt.Coordinate;
        if (points.Contains(p))
            return false;
        points.Add(p);
    }
    return true;
}
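The same duplicate-detection idea can be sketched stand-alone with the BCL HashSet<T> in place of the custom Set<T>; the coordinate type here is a value tuple standing in for ICoordinate, chosen because tuples give the value-based equality the set lookup depends on:

using System.Collections.Generic;

static class SimplicityCheck
{
    // A sequence of points is "simple" if no coordinate repeats.
    public static bool IsSimple(IEnumerable<(double X, double Y)> points)
    {
        var seen = new HashSet<(double X, double Y)>();
        foreach (var p in points)
        {
            // Add returns false when the element is already present,
            // collapsing the Contains/Add pair into a single lookup.
            if (!seen.Add(p))
                return false;
        }
        return true;
    }
}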
public void Init(FdoCache cache, bool enableCancel)
{
    CheckDisposed();

    m_cache = cache;
    m_btnCancel.Visible = enableCancel;

    Set<int> revIdxWs = new Set<int>(4);
    foreach (IReversalIndex ri in cache.LangProject.LexDbOA.ReversalIndexesOC)
        revIdxWs.Add(ri.WritingSystemRAHvo);

    // Include only the analysis writing systems chosen by the user. See LT-7514 and LT-7239.
    Set<int> activeWs = new Set<int>(8);
    foreach (int ws in cache.LangProject.AnalysisWssRC.HvoArray)
        activeWs.Add(ws);

    m_cbWritingSystems.Sorted = true;
    m_cbWritingSystems.DisplayMember = "Name";
    NamedWritingSystem nwsSelected = null;
    foreach (NamedWritingSystem nws in cache.LangProject.GetDbNamedWritingSystems())
    {
        if (revIdxWs.Contains(nws.Hvo))
        {
            AddLanguageForExistingRevIdx(nws.IcuLocale);
            continue;
        }
        if (!activeWs.Contains(nws.Hvo))
            continue;
        m_cbWritingSystems.Items.Add(nws);
        if (nwsSelected == null && !LanguageMatchesExistingRevIdx(nws.IcuLocale))
            nwsSelected = nws;
    }
    if (nwsSelected != null)
        m_cbWritingSystems.SelectedItem = nwsSelected;
    if (m_cbWritingSystems.Items.Count > 0 && m_cbWritingSystems.SelectedIndex < 0)
        m_cbWritingSystems.SelectedIndex = 0;
    if (!enableCancel && m_cbWritingSystems.Items.Count == 0)
        throw new ApplicationException("Cancel is disabled, but there are no writing systems to choose, so the user has no way to get out of this dialog.");
}
internal FunctionImportStructuralTypeMappingKB(
    IEnumerable<FunctionImportStructuralTypeMapping> structuralTypeMappings,
    ItemCollection itemCollection)
{
    DebugCheck.NotNull(structuralTypeMappings);
    DebugCheck.NotNull(itemCollection);

    m_itemCollection = itemCollection;

    // If there are no specific type mappings, initialize with defaults.
    if (structuralTypeMappings.Count() == 0)
    {
        ReturnTypeColumnsRenameMapping = new Dictionary<string, FunctionImportReturnTypeStructuralTypeColumnRenameMapping>();
        NormalizedEntityTypeMappings =
            new ReadOnlyCollection<FunctionImportNormalizedEntityTypeMapping>(
                new List<FunctionImportNormalizedEntityTypeMapping>());
        DiscriminatorColumns = new ReadOnlyCollection<string>(new List<string>());
        MappedEntityTypes = new ReadOnlyCollection<EntityType>(new List<EntityType>());
        return;
    }

    var entityTypeMappings = structuralTypeMappings.OfType<FunctionImportEntityTypeMapping>();

    // FunctionImportEntityTypeMapping
    if (null != entityTypeMappings
        && null != entityTypeMappings.FirstOrDefault())
    {
        var isOfTypeEntityTypeColumnsRenameMapping =
            new Dictionary<EntityType, Collection<FunctionImportReturnTypePropertyMapping>>();
        var entityTypeColumnsRenameMapping =
            new Dictionary<EntityType, Collection<FunctionImportReturnTypePropertyMapping>>();
        var normalizedEntityTypeMappings = new List<FunctionImportNormalizedEntityTypeMapping>();

        // Collect all mapped entity types.
        MappedEntityTypes = entityTypeMappings
            .SelectMany(mapping => mapping.GetMappedEntityTypes(m_itemCollection))
            .Distinct()
            .ToList()
            .AsReadOnly();

        // Collect all discriminator columns.
        DiscriminatorColumns = entityTypeMappings
            .SelectMany(mapping => mapping.GetDiscriminatorColumns())
            .Distinct()
            .ToList()
            .AsReadOnly();

        m_entityTypeLineInfos = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);
        m_isTypeOfLineInfos = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);

        foreach (var entityTypeMapping in entityTypeMappings)
        {
            // Remember LineInfos for error reporting.
            foreach (var entityType in entityTypeMapping.EntityTypes)
            {
                m_entityTypeLineInfos.Add(entityType, entityTypeMapping.LineInfo);
            }
            foreach (var isTypeOf in entityTypeMapping.IsOfTypeEntityTypes)
            {
                m_isTypeOfLineInfos.Add(isTypeOf, entityTypeMapping.LineInfo);
            }

            // Create map from column name to condition.
            var columnMap = entityTypeMapping.Conditions.ToDictionary(
                condition => condition.ColumnName,
                condition => condition);

            // Align conditions with discriminator columns.
            var columnMappings = new List<FunctionImportEntityTypeMappingCondition>(DiscriminatorColumns.Count);
            for (var i = 0; i < DiscriminatorColumns.Count; i++)
            {
                var discriminatorColumn = DiscriminatorColumns[i];
                FunctionImportEntityTypeMappingCondition mappingCondition;
                if (columnMap.TryGetValue(discriminatorColumn, out mappingCondition))
                {
                    columnMappings.Add(mappingCondition);
                }
                else
                {
                    // Null indicates the value for this discriminator doesn't matter.
                    columnMappings.Add(null);
                }
            }

            // Create bit map for implied entity types.
            var impliedEntityTypesBitMap = new bool[MappedEntityTypes.Count];
            var impliedEntityTypesSet = new Set<EntityType>(entityTypeMapping.GetMappedEntityTypes(m_itemCollection));
            for (var i = 0; i < MappedEntityTypes.Count; i++)
            {
                impliedEntityTypesBitMap[i] = impliedEntityTypesSet.Contains(MappedEntityTypes[i]);
            }

            // Construct normalized mapping.
            normalizedEntityTypeMappings.Add(
                new FunctionImportNormalizedEntityTypeMapping(this, columnMappings, new BitArray(impliedEntityTypesBitMap)));

            // Construct the rename mappings by adding isTypeOf types and specific entity types
            // to the corresponding lists.
            foreach (var isOfType in entityTypeMapping.IsOfTypeEntityTypes)
            {
                if (!isOfTypeEntityTypeColumnsRenameMapping.Keys.Contains(isOfType))
                {
                    isOfTypeEntityTypeColumnsRenameMapping.Add(
                        isOfType, new Collection<FunctionImportReturnTypePropertyMapping>());
                }
                foreach (var rename in entityTypeMapping.ColumnsRenameList)
                {
                    isOfTypeEntityTypeColumnsRenameMapping[isOfType].Add(rename);
                }
            }
            foreach (var entityType in entityTypeMapping.EntityTypes)
            {
                if (!entityTypeColumnsRenameMapping.Keys.Contains(entityType))
                {
                    entityTypeColumnsRenameMapping.Add(
                        entityType, new Collection<FunctionImportReturnTypePropertyMapping>());
                }
                foreach (var rename in entityTypeMapping.ColumnsRenameList)
                {
                    entityTypeColumnsRenameMapping[entityType].Add(rename);
                }
            }
        }

        ReturnTypeColumnsRenameMapping =
            new FunctionImportReturnTypeEntityTypeColumnsRenameBuilder(
                isOfTypeEntityTypeColumnsRenameMapping, entityTypeColumnsRenameMapping)
                .ColumnRenameMapping;
        NormalizedEntityTypeMappings =
            new ReadOnlyCollection<FunctionImportNormalizedEntityTypeMapping>(normalizedEntityTypeMappings);
    }
    else
    {
        // FunctionImportComplexTypeMapping
        Debug.Assert(
            structuralTypeMappings.First() is FunctionImportComplexTypeMapping,
            "only two types can have renames, complexType and entityType");
        var complexTypeMappings = structuralTypeMappings.Cast<FunctionImportComplexTypeMapping>();

        Debug.Assert(
            complexTypeMappings.Count() == 1,
            "there should be exactly one mapping, since a complex type cannot derive from another complex type");

        ReturnTypeColumnsRenameMapping = new Dictionary<string, FunctionImportReturnTypeStructuralTypeColumnRenameMapping>();
        foreach (var rename in complexTypeMappings.First().ColumnsRenameList)
        {
            var columnRenameMapping = new FunctionImportReturnTypeStructuralTypeColumnRenameMapping(rename.CMember);
            columnRenameMapping.AddRename(
                new FunctionImportReturnTypeStructuralTypeColumn(
                    rename.SColumn,
                    complexTypeMappings.First().ReturnType,
                    false,
                    rename.LineInfo));
            ReturnTypeColumnsRenameMapping.Add(rename.CMember, columnRenameMapping);
        }

        // Initialize the entity mapping data as empty.
        NormalizedEntityTypeMappings =
            new ReadOnlyCollection<FunctionImportNormalizedEntityTypeMapping>(
                new List<FunctionImportNormalizedEntityTypeMapping>());
        DiscriminatorColumns = new ReadOnlyCollection<string>(new List<string>());
        MappedEntityTypes = new ReadOnlyCollection<EntityType>(new List<EntityType>());
    }
}
// Utility function for finding the correct order in which to process directories.
List<SolutionFolderItem> CalculateSubDirOrder (AutotoolsContext ctx, SolutionFolder folder, SolutionConfiguration config)
{
    List<SolutionFolderItem> resultOrder = new List<SolutionFolderItem> ();
    Set<SolutionFolderItem> dependenciesMet = new Set<SolutionFolderItem> ();
    Set<SolutionFolderItem> inResult = new Set<SolutionFolderItem> ();

    // We don't have to worry about projects built in parent combines
    dependenciesMet.Union (ctx.GetBuiltProjects ());

    bool added;
    string notMet;
    do {
        added = false;
        notMet = null;

        List<SolutionFolderItem> items = new List<SolutionFolderItem> ();
        GetSubItems (items, folder);

        foreach (SolutionFolderItem item in items) {
            Set<SolutionFolderItem> references, provides;
            if (inResult.Contains (item))
                continue;

            if (item is SolutionItem) {
                SolutionItem entry = (SolutionItem) item;
                if (!config.BuildEnabledForItem (entry))
                    continue;

                references = new Set<SolutionFolderItem> ();
                provides = new Set<SolutionFolderItem> ();
                references.Union (entry.GetReferencedItems (config.Selector));
                provides.Add (entry);
            } else if (item is SolutionFolder) {
                GetAllProjects ((SolutionFolder) item, config, out provides, out references);
            } else
                continue;

            if (dependenciesMet.ContainsSet (references)) {
                resultOrder.Add (item);
                dependenciesMet.Union (provides);
                inResult.Add (item);
                added = true;
            } else
                notMet = item.Name;
        }
    } while (added);

    if (notMet != null)
        throw new Exception ("Impossible to find a solution order that satisfies project references for '" + notMet + "'");

    return resultOrder;
}
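The loop above is effectively Kahn-style topological ordering expressed with sets: an item enters the result once every item it references has already been "met". A compact sketch of the same pattern over plain strings, using HashSet<T>; the Deps dictionary is a hypothetical stand-in for GetReferencedItems, and it assumes every item appears as a key:

using System;
using System.Collections.Generic;
using System.Linq;

static class BuildOrder
{
    // deps[x] = the set of items x depends on (hypothetical input).
    public static List<string> Order(Dictionary<string, HashSet<string>> deps)
    {
        var result = new List<string>();
        var met = new HashSet<string>();
        bool added;
        do
        {
            added = false;
            foreach (var item in deps.Keys.Where(i => !met.Contains(i)))
            {
                // IsSubsetOf mirrors dependenciesMet.ContainsSet(references).
                if (deps[item].IsSubsetOf(met))
                {
                    result.Add(item);
                    met.Add(item);
                    added = true;
                }
            }
        } while (added);

        if (result.Count < deps.Count)
            throw new Exception("Cyclic dependencies: no valid order exists.");
        return result;
    }
}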
void PublishDir (Set<FilePath> dirs, FilePath dir, bool rec, IProgressMonitor monitor)
{
    string ndir = (string) dir;
    while (ndir [ndir.Length - 1] == Path.DirectorySeparatorChar)
        ndir = ndir.Substring (0, ndir.Length - 1);

    dir = ndir;
    if (dirs.Contains (dir))
        return;

    dirs.Add (dir);
    if (rec) {
        PublishDir (dirs, dir.ParentDirectory, true, monitor);
        Add (dir, false, monitor);
    }
}
static internal bool TryFirstWsInList(SIL.FieldWorks.Common.COMInterfaces.ISilDataAccess sda,
    int hvo, int flid, int[] wssToTry, ref Set<int> wssTried,
    out int retWs, out ITsString retTss)
{
    retTss = null;
    retWs = 0;
    foreach (int wsLoop in wssToTry)
    {
        if (wssTried.Contains(wsLoop))
            continue;
        wssTried.Add(wsLoop);
        retTss = sda.get_MultiStringAlt(hvo, flid, wsLoop);
        if (retTss.Length > 0)
        {
            retWs = wsLoop;
            return true;
        }
    }
    return false;
}
/// <summary>
/// Find the best existing LexEntry option matching 'homographForm' (and possibly 'sDefnTarget')
/// in order to determine if we should merge leTarget into that entry.
/// </summary>
/// <param name="cache"></param>
/// <param name="homographForm"></param>
/// <param name="sDefnTarget"></param>
/// <param name="leTarget">a LexEntry that you want to consider merging into a more appropriate LexEntry;
/// if null, we ignore 'newHvos' and 'hvoDomain'</param>
/// <param name="newHvos"></param>
/// <param name="hvoDomain"></param>
/// <param name="fGotExactMatch"></param>
/// <returns></returns>
private static ILexEntry FindBestLexEntryAmongstHomographs(FdoCache cache,
    string homographForm, string sDefnTarget, ILexEntry leTarget, Set<int> newHvos,
    int hvoDomain, out bool fGotExactMatch)
{
    ILexEntry leSaved = null;
    List<ILexEntry> rgEntries = LexEntry.CollectHomographs(homographForm, 0,
        LexEntry.GetHomographList(cache, homographForm),
        MoMorphType.kmtStem, true);
    leSaved = null; // saved entry to merge into (from previous iteration)
    bool fSavedIsOld = false; // true if leSaved is old (and non-null).
    fGotExactMatch = false; // true if we find a match for cf AND defn.
    bool fCurrentIsNew = false;
    foreach (ILexEntry leCurrent in rgEntries)
    {
        if (leTarget != null)
        {
            if (leCurrent.Hvo == leTarget.Hvo)
                continue; // not interested in merging with ourself.
            // See if this is one of the newly added entries. If it is, it has exactly one sense,
            // and that sense is in our list.
            fCurrentIsNew = leCurrent.SensesOS.Count == 1 && newHvos.Contains(leCurrent.SensesOS.HvoArray[0]);
            if (fCurrentIsNew && leCurrent.Hvo > leTarget.Hvo)
                continue; // won't consider ANY kind of merge with a new object of greater HVO.
        }
        // Decide whether leCurrent should be noted as the entry that we will merge with if
        // we don't find an exact match.
        if (!fGotExactMatch) // leSaved is irrelevant if we already got an exact match.
        {
            if (leSaved == null)
            {
                leSaved = leCurrent;
                fSavedIsOld = !fCurrentIsNew;
            }
            else // we have already found a candidate
            {
                if (fSavedIsOld)
                {
                    // We will only consider the new one if it is also old, and
                    // (rather arbitrarily) if it has a smaller HVO.
                    if ((!fCurrentIsNew) && leCurrent.Hvo < leSaved.Hvo)
                    {
                        leSaved = leCurrent; // fSavedIsOld stays true.
                    }
                }
                else // we already have a candidate, but it is another of the new entries
                {
                    // If current is old, we'll use it for sure.
                    if (!fCurrentIsNew)
                    {
                        leSaved = leCurrent;
                        fSavedIsOld = false; // since fCurrentIsNew is false.
                    }
                    else
                    {
                        // We already have a new candidate (which must have a smaller hvo than target)
                        // and now we have another new entry which matches!
                        // We'll prefer it only if its hvo is smaller still.
                        if (leCurrent.Hvo < leSaved.Hvo)
                        {
                            leSaved = leCurrent; // fSavedIsOld stays false.
                        }
                    }
                }
            }
        }

        // See if we want to try to find a matching existing sense.
        if (sDefnTarget == null)
            continue;

        // This deals with all senses in the entry,
        // whether owned directly by the entry or by its senses
        // at whatever level.
        // If the new definition matches an existing definition (or if both
        // are missing) add the current domain to the existing sense.
        // Note: if more than one sense has the same definition (maybe missing) we should
        // add the domain to all senses--not just the first one encountered.
        foreach (ILexSense lexS in leCurrent.AllSenses)
        {
            if (lexS.Definition != null && lexS.Definition.AnalysisDefaultWritingSystem != null)
            {
                string sDefnCurrent = lexS.Definition.AnalysisDefaultWritingSystem.UnderlyingTsString.Text;
                if ((sDefnCurrent == null && sDefnTarget == null) ||
                    (sDefnCurrent != null && sDefnTarget != null && sDefnCurrent.Trim() == sDefnTarget.Trim()))
                {
                    // We found a sense that has the same citation form and definition as the one
                    // we're trying to merge.
                    // Add the new domain to that sense (if not already present), delete the temporary one,
                    // and return. (We're not displaying this sense, so don't bother trying to update the display.)
                    if (hvoDomain > 0 && !lexS.SemanticDomainsRC.Contains(hvoDomain))
                        lexS.SemanticDomainsRC.Add(hvoDomain);
                    fGotExactMatch = true;
                }
            }
        }
    } // loop over matching entries
    return leSaved;
}
/// <summary>
/// Delete any MSAs that are no longer referenced by a sense (or subsense).
/// </summary>
private void DeleteUnusedMSAs()
{
    Set<int> msasUsed = new Set<int>();
    foreach (int hvo in this.AllSenseHvos)
    {
        int hvoMsa = m_cache.GetObjProperty(hvo, (int)LexSense.LexSenseTags.kflidMorphoSyntaxAnalysis);
        if (hvoMsa != 0)
            msasUsed.Add(hvoMsa);
    }
    Set<int> hvosObsolete = new Set<int>();
    foreach (int hvo in this.MorphoSyntaxAnalysesOC.HvoArray)
    {
        if (!msasUsed.Contains(hvo))
            hvosObsolete.Add(hvo);
    }
    if (hvosObsolete.Count > 0)
        CmObject.DeleteObjects(hvosObsolete, m_cache);
}
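The pattern here generalizes beyond MSAs: build a set of everything still referenced, then sweep the complement of the owned objects. A minimal sketch with HashSet<int>, where the two input sequences are hypothetical stand-ins for AllSenseHvos references and MorphoSyntaxAnalysesOC:

using System.Collections.Generic;
using System.Linq;

static class UnusedSweep
{
    // Returns the ids in 'owned' that nothing in 'referencedIds' points at.
    public static HashSet<int> FindUnused(IEnumerable<int> referencedIds, IEnumerable<int> owned)
    {
        // 0 plays the role of "no reference", as in the method above.
        var used = new HashSet<int>(referencedIds.Where(id => id != 0));
        var obsolete = new HashSet<int>(owned);
        obsolete.ExceptWith(used); // sweep: everything owned but not used
        return obsolete;
    }
}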
/// <summary>
/// Insert a new object of the specified class into the specified property of your object.
/// </summary>
/// <returns>-1 if unsuccessful, -2 if unsuccessful and no further attempts should be made;
/// otherwise, index of new object (0 if collection)</returns>
int InsertObject(int flid, int newObjectClassId)
{
    CheckDisposed();
    bool fAbstract = m_cache.DomainDataByFlid.MetaDataCache.GetAbstract(newObjectClassId);
    if (fAbstract)
    {
        // We've been handed an abstract class to insert. Try to determine the desired
        // concrete class from the context.
        if (newObjectClassId == MoFormTags.kClassId && Object is ILexEntry)
        {
            var entry = (Object as ILexEntry);
            newObjectClassId = entry.GetDefaultClassForNewAllomorph();
        }
        else
        {
            return -1;
        }
    }
    // OK, we can add to property flid of the object of this slice.
    int insertionPosition = 0; // leave it at 0 if it does not matter
    int hvoOwner = Object.Hvo;
    int clidOwner = Object.ClassID;
    int clidOfFlid = flid / 1000;
    if (clidOwner != clidOfFlid && clidOfFlid == Object.Owner.ClassID)
    {
        hvoOwner = Object.Owner.Hvo;
    }
    int type = GetFieldType(flid);
    if (type == (int)CellarPropertyType.OwningSequence)
    {
        try
        {
            // We might not be on the right slice to insert this item. See FWR-898.
            insertionPosition = Cache.DomainDataByFlid.get_VecSize(hvoOwner, flid);
        }
        catch
        {
            return -1;
        }
        if (ContainingDataTree != null && ContainingDataTree.CurrentSlice != null)
        {
            ISilDataAccess sda = m_cache.DomainDataByFlid;
            int chvo = insertionPosition;
            // See if the current slice in any way indicates a position in that property.
            object[] key = ContainingDataTree.CurrentSlice.Key;
            bool fGotIt = false;
            for (int ikey = key.Length - 1; ikey >= 0 && !fGotIt; ikey--)
            {
                if (!(key[ikey] is int))
                    continue;
                var hvoTarget = (int)key[ikey];
                for (int i = 0; i < chvo; i++)
                {
                    if (hvoTarget == sda.get_VecItem(hvoOwner, flid, i))
                    {
                        insertionPosition = i + 1; // insert after current object.
                        fGotIt = true; // break outer loop
                        break;
                    }
                }
            }
        }
    }
    var slices = new Set<Slice>(ContainingDataTree.Slices);

    // Save DataTree for the finally block. Note premature return below due to IsDisposed. See LT-9005.
    DataTree dtContainer = ContainingDataTree;
    try
    {
        dtContainer.SetCurrentObjectFlids(hvoOwner, flid);
        var fieldType = (CellarPropertyType)m_cache.MetaDataCacheAccessor.GetFieldType(flid);
        switch (fieldType)
        {
            case CellarPropertyType.OwningCollection:
                insertionPosition = -1;
                break;
            case CellarPropertyType.OwningAtomic:
                insertionPosition = -2;
                break;
        }
        using (CmObjectUi uiObj = CmObjectUi.CreateNewUiObject(m_mediator, newObjectClassId, hvoOwner, flid, insertionPosition))
        {
            // If uiObj is null, typically CreateNewUiObject displayed a dialog and the user cancelled.
            // We return -1 to make the caller give up trying to insert, so we don't get another dialog if
            // there is another slice that could insert this kind of object.
            // If 'this' is disposed, typically the inserted object occupies a place in the record list for
            // this view, and inserting an object caused the list to be refreshed and all slices for this
            // record to be disposed. In that case, we won't be able to find a child of this to activate,
            // so we'll just settle for having created the object.
            // Enhance JohnT: possibly we could load information from the slice into local variables before
            // calling CreateNewUiObject so that we could do a better job of picking the slice to focus
            // after an insert which disposes 'this'. Or perhaps we could improve the refresh list process
            // so that it more successfully restores the current item without disposing of all the slices.
            if (IsDisposed)
                return -1;
            if (uiObj == null)
                return -2; // Nothing created.

            switch (fieldType)
            {
                case CellarPropertyType.OwningCollection:
                    // Order is not fully predictable; figure out where it DID show up.
                    insertionPosition = m_cache.DomainDataByFlid.GetObjIndex(hvoOwner, flid, uiObj.Object.Hvo);
                    break;
                case CellarPropertyType.OwningAtomic:
                    insertionPosition = 0;
                    break;
            }
            //if (ihvoPosition == ClassAndPropInfo.kposNotSet && cpi.fieldType == DataTree.kcptOwningSequence)
            //{
            //    // insert at end of sequence.
            //    ihvoPosition = cache.DomainDataByFlid.get_VecSize(hvoOwner, (int)cpi.flid);
            //} // otherwise we already worked out the position or it doesn't matter
            //// Note: ihvoPosition ignored if sequence(?) or atomic.
            //int hvoNew = cache.CreateObject((int)(cpi.signatureClsid), hvoOwner, (int)(cpi.flid), ihvoPosition);
            //cache.DomainDataByFlid.PropChanged(null, (int)PropChangeType.kpctNotifyAll, hvoOwner, (int)(cpi.flid), ihvoPosition, 1, 0);
            if (hvoOwner == Object.Hvo && Expansion == DataTree.TreeItemState.ktisCollapsed)
            {
                // We added something to the object of the current slice...almost certainly it
                // will be something that will display under this node...if it is still collapsed,
                // expand it to show the thing inserted.
                TreeNode.ToggleExpansion(IndexInContainer);
            }
            Slice child = ExpandSubItem(uiObj.Object.Hvo);
            if (child != null)
                child.FocusSliceOrChild();
            else
            {
                // If possible, jump to the newly inserted sub item.
                if (m_mediator.BroadcastMessageUntilHandled("JumpToRecord", uiObj.Object.Hvo))
                    return insertionPosition;
                // If we haven't found a slice...common now, because there's rarely a need to expand anything...
                // and some slice was added, focus it.
                foreach (Slice slice in Parent.Controls)
                {
                    if (!slices.Contains(slice))
                    {
                        slice.FocusSliceOrChild();
                        break;
                    }
                }
            }
        }
    }
    finally
    {
        dtContainer.ClearCurrentObjectFlids();
    }
    return insertionPosition;
}
/// <summary>
/// Check that a ring does not self-intersect, except at its endpoints.
/// Algorithm is to count the number of times each node along the edge occurs.
/// If any occurs more than once, that must be a self-intersection.
/// </summary>
private void CheckNoSelfIntersectingRing(EdgeIntersectionList eiList)
{
    Set<Coordinate> nodeSet = new Set<Coordinate>();
    bool isFirst = true;
    foreach (EdgeIntersection ei in eiList)
    {
        if (isFirst)
        {
            isFirst = false;
            continue;
        }
        if (nodeSet.Contains(ei.Coordinate))
        {
            validErr = new TopologyValidationError(TopologyValidationErrors.RingSelfIntersection, ei.Coordinate);
            return;
        }
        else
            nodeSet.Add(ei.Coordinate);
    }
}
public void assembleMinimumCoveringSet(ClusterInfo c)
{
    if (c.proteinGroups.Count == 1) // degenerate case
    {
        foreach (ProteinGroupInfo proGroup in c.proteinGroups)
            proGroup.uniquePeptideCount = int.MaxValue; // value is n/a
        return;
    }

    /*Set<ResultInfo> clusterResults = new Set<ResultInfo>( c.results );
    ProteinGroupList clusterGroups = new ProteinGroupList();
    foreach( ProteinGroupInfo proGroup in c.proteinGroups )
        clusterGroups.Add( proGroup );
    //Console.WriteLine();
    while( clusterResults.Count > 0 )
    {
        List<ProteinGroupInfo> minRemainingResults = new List<ProteinGroupInfo>();
        int minRemainingResultCount = clusterResults.Count;
        //int n = 0;
        //Console.WriteLine( "groups: " + clusterGroups.Count + "; results: " + clusterResults.Count );
        foreach( ProteinGroupInfo proGroup in clusterGroups )
        {
            //Console.Write( n++ + " of " + clusterGroups.Count + "\r" );
            int count = clusterResults.Count;
            foreach( ResultInfo r in proGroup.results )
                if( clusterResults.Contains( r ) )
                    --count;
            if( count <= minRemainingResultCount )
            {
                if( count < minRemainingResultCount )
                    minRemainingResults.Clear();
                minRemainingResults.Add( proGroup );
            }
        }

        ProteinGroupInfo mostGreedyGroup = minRemainingResults[0];
        minRemainingResults.Clear();
        int oldCount = clusterResults.Count;
        clusterResults.Subtract( mostGreedyGroup.results );
        if( clusterResults.Count >= oldCount )
        {
            Console.Error.WriteLine( "Something has gone terribly wrong!" );
            System.Diagnostics.Process.GetCurrentProcess().Kill();
        }
        mostGreedyGroup.minSet = true;
        clusterGroups.Remove( mostGreedyGroup );
    }*/

    // Get the results in the cluster
    Set<ResultInfo> clusterResults = new Set<ResultInfo>(c.results);

    // Get the protein groups in the cluster
    ProteinGroupList clusterGroups = new ProteinGroupList();
    foreach (ProteinGroupInfo proGroup in c.proteinGroups)
        clusterGroups.Add(proGroup);

    // while there are results in the cluster
    while (clusterResults.Count > 0)
    {
        // Maps the number of remaining results to a protein group
        Map<int, List<ProteinGroupInfo>> remainingResults = new Map<int, List<ProteinGroupInfo>>();

        // Iterate through protein groups
        foreach (ProteinGroupInfo proGroup in clusterGroups)
        {
            // Get the number of results in the cluster
            int count = clusterResults.Count;

            // Iterate over the cluster results and see how
            // many cluster group results can be explained
            // by that protein group
            foreach (ResultInfo r in proGroup.results)
            {
                if (clusterResults.Contains(r))
                    --count;
            }

            // Map the number of remaining results to that protein group
            remainingResults[count].Add(proGroup);
        }

        // Take the first protein group that can explain the most results
        ProteinGroupInfo mostGreedyGroup = remainingResults.Values[0][0];

        // Subtract its results from the cluster results
        mostGreedyGroup.uniquePeptideCount = clusterResults.Count - remainingResults.Keys[0];
        clusterResults.Subtract(mostGreedyGroup.results);

        // Remove the most greedy group from the cluster groups
        clusterGroups.Remove(mostGreedyGroup);
    }
}
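The active loop is the classic greedy set-cover heuristic: repeatedly pick the group that explains the most still-uncovered results. A self-contained sketch with BCL collections; the group names and integer result ids are simplified stand-ins for ProteinGroupInfo and ResultInfo:

using System.Collections.Generic;
using System.Linq;

static class GreedyCover
{
    // groups maps a group name to the set of results it can explain.
    // Returns the chosen group names in greedy order.
    public static List<string> MinimumCover(Dictionary<string, HashSet<int>> groups, IEnumerable<int> allResults)
    {
        var uncovered = new HashSet<int>(allResults);
        var chosen = new List<string>();
        while (uncovered.Count > 0)
        {
            // Pick the group covering the most uncovered results.
            var best = groups
                .Select(g => new { g.Key, Covered = g.Value.Count(uncovered.Contains) })
                .OrderByDescending(g => g.Covered)
                .First();
            if (best.Covered == 0)
                break; // nothing left can be explained by any remaining group

            chosen.Add(best.Key);
            uncovered.ExceptWith(groups[best.Key]); // like clusterResults.Subtract(...)
            groups.Remove(best.Key);
        }
        return chosen;
    }
}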
/// <summary>
/// Get the set of significant digraphs (multigraphs) for the writing system. At the
/// moment, these are derived from ICU sorting rules associated with the writing system.
/// </summary>
private Set<string> GetDigraphs(string sWs, out Dictionary<string, string> mapChars)
{
    Set<string> digraphs = null;
    if (m_mapWsDigraphs.TryGetValue(sWs, out digraphs))
    {
        mapChars = m_mapWsMapChars[sWs];
        return digraphs;
    }
    digraphs = new Set<string>();
    mapChars = new Dictionary<string, string>();

    int ws = m_cache.LanguageWritingSystemFactoryAccessor.GetWsFromStr(sWs);
    IWritingSystem wsX = null;
    ICollation coll = null;
    string sIcuRules = null;
    if (ws > 0)
    {
        wsX = m_cache.LanguageWritingSystemFactoryAccessor.get_EngineOrNull(ws);
        if (wsX.CollationCount > 0)
        {
            coll = wsX.get_Collation(0);
            sIcuRules = coll.IcuRules;
            if (String.IsNullOrEmpty(sIcuRules))
            {
                // The ICU rules may not be loaded for built-in languages, but are
                // still helpful for our purposes here.
                string sIcuOrig = sIcuRules;
                coll.LoadIcuRules(sWs);
                sIcuRules = coll.IcuRules;
                coll.IcuRules = sIcuOrig; // but we don't want to actually change anything!
            }
        }
    }
    if (!String.IsNullOrEmpty(sIcuRules) && sIcuRules.Contains("&"))
    {
        string[] rgsRules = sIcuRules.Split(new char[] { '&' }, StringSplitOptions.RemoveEmptyEntries);
        for (int i = 0; i < rgsRules.Length; ++i)
        {
            string sRule = rgsRules[i];
            // This is a valid rule that specifies that the digraph aa should be ignored:
            //     [last tertiary ignorable] = \u02bc = aa
            // but the code here will ignore this. YAGNI: a user specifying a digraph
            // as ignorable may never happen.
            if (sRule.Contains("["))
                sRule = sRule.Substring(0, sRule.IndexOf("["));
            if (String.IsNullOrEmpty(sRule.Trim()))
                continue;
            sRule = sRule.Replace("<<<", "=");
            sRule = sRule.Replace("<<", "=");
            if (sRule.Contains("<"))
            {
                // "&N<ng<<<Ng<ny<<<Ny" => "&N<ng=Ng<ny=Ny"
                // "&N<ñ<<<Ñ" => "&N<ñ=Ñ"
                // There are other issues we are not handling properly, such as the next line:
                //     &N<\u006e\u0067
                string[] rgsPieces = sRule.Split(new char[] { '<', '=' }, StringSplitOptions.RemoveEmptyEntries);
                for (int j = 0; j < rgsPieces.Length; ++j)
                {
                    string sGraph = rgsPieces[j];
                    sGraph = sGraph.Trim();
                    if (String.IsNullOrEmpty(sGraph))
                        continue;
                    sGraph = Icu.Normalize(sGraph, Icu.UNormalizationMode.UNORM_NFD);
                    if (sGraph.Length > 1)
                    {
                        sGraph = Icu.ToLower(sGraph, sWs);
                        if (!digraphs.Contains(sGraph))
                            digraphs.Add(sGraph);
                    }
                }
            }
            else if (sRule.Contains("="))
            {
                // "&ae<<æ<<<Æ" => "&ae=æ=Æ"
                string[] rgsPieces = sRule.Split(new char[] { '=' }, StringSplitOptions.RemoveEmptyEntries);
                string sGraphPrimary = rgsPieces[0].Trim();
                Debug.Assert(!String.IsNullOrEmpty(sGraphPrimary));
                sGraphPrimary = Icu.ToLower(sGraphPrimary, sWs);
                for (int j = 1; j < rgsPieces.Length; ++j)
                {
                    string sGraph = rgsPieces[j];
                    sGraph = sGraph.Trim();
                    if (String.IsNullOrEmpty(sGraph))
                        continue;
                    sGraph = Icu.Normalize(sGraph, Icu.UNormalizationMode.UNORM_NFD);
                    sGraph = Icu.ToLower(sGraph, sWs);
                    if (sGraph != sGraphPrimary)
                    {
                        if (!mapChars.ContainsKey(sGraph))
                            mapChars.Add(sGraph, sGraphPrimary);
                    }
                }
            }
        }
    }
    m_mapWsDigraphs.Add(sWs, digraphs);
    m_mapWsMapChars.Add(sWs, mapChars);
    return digraphs;
}
/// <summary>
/// Compute depths for all dirEdges via breadth-first traversal of nodes in graph.
/// </summary>
/// <param name="startEdge">Edge to start processing with.</param>
// <FIX> MD - use iteration & queue rather than recursion, for speed and robustness
private void ComputeDepths(DirectedEdge startEdge)
{
    Set<Node> nodesVisited = new Set<Node>();
    Queue nodeQueue = new Queue();
    Node startNode = startEdge.Node;
    nodeQueue.Enqueue(startNode);
    nodesVisited.Add(startNode);
    startEdge.Visited = true;
    while (nodeQueue.Count != 0)
    {
        Node n = (Node) nodeQueue.Dequeue();
        nodesVisited.Add(n);
        // compute depths around node, starting at this edge since it has depths assigned
        ComputeNodeDepth(n);
        // add all adjacent nodes to process queue, unless the node has been visited already
        IEnumerator i = ((DirectedEdgeStar) n.Edges).GetEnumerator();
        while (i.MoveNext())
        {
            DirectedEdge de = (DirectedEdge) i.Current;
            DirectedEdge sym = de.Sym;
            if (sym.IsVisited)
                continue;
            Node adjNode = sym.Node;
            if (!(nodesVisited.Contains(adjNode)))
            {
                nodeQueue.Enqueue(adjNode);
                nodesVisited.Add(adjNode);
            }
        }
    }
}
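The visited-set plus queue combination above is the standard iterative BFS skeleton. A generic version of just that skeleton, assuming the caller supplies an adjacency function (the neighbors delegate is hypothetical, not part of the original graph API):

using System;
using System.Collections.Generic;

static class Bfs
{
    // Visit every node reachable from start, breadth-first.
    public static void Traverse<T>(T start, Func<T, IEnumerable<T>> neighbors, Action<T> visit)
    {
        var seen = new HashSet<T> { start };
        var queue = new Queue<T>();
        queue.Enqueue(start);
        while (queue.Count > 0)
        {
            T n = queue.Dequeue();
            visit(n);
            foreach (T adj in neighbors(n))
            {
                // The set guards against enqueueing a node twice,
                // exactly as nodesVisited does in ComputeDepths.
                if (seen.Add(adj))
                    queue.Enqueue(adj);
            }
        }
    }
}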
/// <summary>
/// Return true if the specified paragraph needs its guesses updated when we've changed something about the analyses
/// or occurrences of analyses of one of the specified wordforms.
/// </summary>
private bool NeedsGuessesUpdated(int hvoPara, Set<int> wordforms)
{
    int ktagParaSegments = StTxtPara.SegmentsFlid(Cache);
    int ktagSegmentForms = InterlinVc.SegmentFormsTag(Cache);
    ISilDataAccess sda = m_fdoCache.MainCacheAccessor;
    // If we haven't already figured the segments of a paragraph, we don't need to update it; the guesses will
    // get made when scrolling makes the paragraph visible.
    if (!sda.get_IsPropInCache(hvoPara, ktagParaSegments, (int)CellarModuleDefns.kcptReferenceSequence, 0))
        return false;
    int cseg = sda.get_VecSize(hvoPara, ktagParaSegments);
    for (int iseg = 0; iseg < cseg; iseg++)
    {
        int hvoSeg = sda.get_VecItem(hvoPara, ktagParaSegments, iseg);
        int cxfic = sda.get_VecSize(hvoSeg, ktagSegmentForms);
        for (int ixfic = 0; ixfic < cxfic; ixfic++)
        {
            int hvoWfic = sda.get_VecItem(hvoSeg, ktagSegmentForms, ixfic);
            int hvoInstanceOf = sda.get_ObjectProp(hvoWfic, (int)CmAnnotation.CmAnnotationTags.kflidInstanceOf);
            if (hvoInstanceOf == 0)
                continue; // punctuation, doesn't need guess
            if (Cache.GetClassOfObject(hvoInstanceOf) == WfiGloss.kclsidWfiGloss)
                continue; // fully glossed, no need to update.
            if (wordforms.Contains(WfiWordform.GetWordformFromWag(Cache, hvoInstanceOf)))
                return true; // This paragraph IS linked to one of the interesting wordforms; needs guesses updated.
        }
    }
    return false; // no Wfics that might be affected.
}
/// <summary>
/// Perform a fixpoint computation over a set of methods (in general a strongly connected component).
/// It performs the interprocedural analysis for each method, reanalyzing any callers that require updating.
/// </summary>
protected virtual Set<Method> FixPoint()
{
    Set<Method> analyzed = new Set<Method>();
    Set<Method> methodsInFixPoint = new Set<Method>(methodsToAnalyze);
    while (methodsToAnalyze.Count != 0)
    {
        Method m = methodsToAnalyze[0];
        if (verbose)
            Console.Out.WriteLine("Now Analyzing {0} left: {2} Unsafe? {1} ",
                m.GetFullUnmangledNameWithTypeParameters(), IsUnsafe(m), methodsToAnalyze.Count);

        if (PointsToAnalysis.debug)
        {
            if (m.FullName.Contains("PaintDotNet.GradientRenderer.Render"))
            {
                // System.Diagnostics.Debugger.Break();
            }
        }

        analyzed.Add(m);
        methodsToAnalyze.RemoveAt(0);

        bool hasChanged = false;

        // Perform the intraprocedural analysis of the method if it wasn't analyzed before.
        // "Analyzed" means that it was completely analyzed in a previous
        // fixpoint computation, so its value is not going to change.
        if (!WasAnalyzed(m))
            hasChanged = AnalyzeMethod(m);
        //else
        //    Console.Out.WriteLine(" was already analyzed!");

        // If a method changes, we have to reanalyze the callers.
        if (hasChanged && IsAnalyzable(m))
        {
            // I should change this to cg.Callers(m)....
            foreach (Method caller in GetCallers(m))
            {
                if (!methodsToAnalyze.Contains(caller))
                {
                    //if (alreadyAnalyzedMethods.Contains(caller) || methodsInFixPoint.Contains(caller))
                    if (methodsInFixPoint.Contains(caller))
                    {
                        methodsToAnalyze.Add(caller);
                        if (verbose)
                            Console.Out.WriteLine("\t reanalyzing {0}", caller.FullName);
                    }
                }
            }
            //foreach (Method caller in GetCallers(m))
            //{
            //    Console.Out.WriteLine("\t Reanalyzing {0}", caller.GetFullUnmangledNameWithTypeParameters());
            //}
        }

        #region debugging, Delete this!
        if (verbose || debug)
        {
            counter++;
            if (counter % 1000 == 0)
            {
                Console.Out.WriteLine("Now Analyzing {0} Unsafe? {1} {2} To Analyze: {3}",
                    m.GetFullUnmangledNameWithTypeParameters(), IsUnsafe(m), counter, methodsToAnalyze.Count);
                foreach (Method mt in methodsToAnalyze)
                {
                    Console.Out.WriteLine("\t {0}", mt.GetFullUnmangledNameWithTypeParameters());
                }
            }
        }
        #endregion
    }
    return analyzed;
}
public void AddTests()
{
    Set<int> set = new Set<int>();
    Assert.AreEqual(0, set.Count, "Set should be empty at first.");

    // Add an integer.
    set.Add(1);
    Assert.AreEqual(1, set.Count, "Set should have one integer by now.");
    Assert.IsTrue(set.Contains(1), "Set should contain the integer 1.");
}
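A hypothetical companion test, not part of the original suite, that would exercise the other half of set semantics the Add/Contains/Count API implies, assuming this Set<T> ignores duplicate adds as standard sets do:

public void DuplicateAddTests()
{
    Set<int> set = new Set<int>();
    set.Add(1);
    set.Add(1); // duplicate: a set is assumed to ignore this
    Assert.AreEqual(1, set.Count, "Duplicate adds must not grow the set.");

    set.Add(2);
    Assert.AreEqual(2, set.Count, "Set should have two integers by now.");
    Assert.IsFalse(set.Contains(3), "Set should not contain an element that was never added.");
}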
/// <summary>
/// Side effects of deleting the underlying object.
/// This is complicated by the existence of homograph numbers.
/// </summary>
public override void DeleteObjectSideEffects(Set<int> objectsToDeleteAlso, ProgressState state)
{
    // Reset homograph numbers of any similar entries, if any.
    // This version of 'CollectHomographs' leaves out this object,
    // which is a good thing.
    ValidateExistingHomographs(CollectHomographs());

    // Call delete side effects for our owned properties.
    foreach (ILexSense sense in SensesOS)
        sense.DeleteObjectSideEffects(objectsToDeleteAlso, state);
    foreach (IMoMorphSynAnalysis msa in MorphoSyntaxAnalysesOC)
        msa.DeleteObjectSideEffects(objectsToDeleteAlso, state);
    foreach (IMoForm mf in AlternateFormsOS)
        mf.DeleteObjectSideEffects(objectsToDeleteAlso, state);
    foreach (ILexPronunciation lp in PronunciationsOS)
        lp.DeleteObjectSideEffects(objectsToDeleteAlso, state);
    if (EtymologyOAHvo > 0)
        EtymologyOA.DeleteObjectSideEffects(objectsToDeleteAlso, state);
    if (LexemeFormOAHvo > 0)
        LexemeFormOA.DeleteObjectSideEffects(objectsToDeleteAlso, state);

    // Deal with critical inbound references on entry and objects it owns.
    // The idea here is to delete any objects that refer to the entry,
    // but ONLY if those objects would then be invalid.
    // We call DeleteUnderlyingObject() directly on any high risk objects,
    // such as MSAs, MoForms, and senses, since the regular deletion SP would not delete
    // any other invalid objects.
    List<LinkedObjectInfo> linkedObjs = LinkedObjects;
    List<int> deletedObjectIDs = new List<int>();
    foreach (LinkedObjectInfo loi in linkedObjs)
    {
        switch (loi.RelObjClass)
        {
            default:
                break;
            case WfiAnalysis.kclsidWfiAnalysis:
            {
                IWfiAnalysis anal = WfiAnalysis.CreateFromDBObject(m_cache, loi.RelObjId);
                if (loi.RelObjField == (int)WfiAnalysis.WfiAnalysisTags.kflidStems)
                {
                    if (!deletedObjectIDs.Contains(anal.Hvo))
                    {
                        deletedObjectIDs.Add(anal.Hvo);
                        foreach (IWfiMorphBundle mb in anal.MorphBundlesOS)
                            deletedObjectIDs.Add(mb.Hvo);
                        anal.DeleteObjectSideEffects(objectsToDeleteAlso, state);
                    }
                }
                break;
            }
            case MoAlloAdhocProhib.kclsidMoAlloAdhocProhib:
            {
                if (loi.RelObjField == (int)MoAlloAdhocProhib.MoAlloAdhocProhibTags.kflidFirstAllomorph)
                {
                    if (!deletedObjectIDs.Contains(loi.RelObjId))
                    {
                        deletedObjectIDs.Add(loi.RelObjId);
                        IMoAlloAdhocProhib obj = MoAlloAdhocProhib.CreateFromDBObject(m_cache, loi.RelObjId);
                        obj.DeleteObjectSideEffects(objectsToDeleteAlso, state);
                    }
                }
                break;
            }
            case MoMorphAdhocProhib.kclsidMoMorphAdhocProhib:
            {
                if (loi.RelObjField == (int)MoMorphAdhocProhib.MoMorphAdhocProhibTags.kflidFirstMorpheme)
                {
                    if (!deletedObjectIDs.Contains(loi.RelObjId))
                    {
                        deletedObjectIDs.Add(loi.RelObjId);
                        IMoMorphAdhocProhib obj = MoMorphAdhocProhib.CreateFromDBObject(m_cache, loi.RelObjId);
                        obj.DeleteObjectSideEffects(objectsToDeleteAlso, state);
                    }
                }
                break;
            }
            case WfiMorphBundle.kclsidWfiMorphBundle:
            {
                if (loi.RelObjField == (int)WfiMorphBundle.WfiMorphBundleTags.kflidMorph
                    || loi.RelObjField == (int)WfiMorphBundle.WfiMorphBundleTags.kflidSense
                    || loi.RelObjField == (int)WfiMorphBundle.WfiMorphBundleTags.kflidMsa)
                {
                    if (!deletedObjectIDs.Contains(loi.RelObjId))
                    {
                        IWfiMorphBundle mb = WfiMorphBundle.CreateFromDBObject(m_cache, loi.RelObjId);
                        if (!deletedObjectIDs.Contains(mb.OwnerHVO))
                        {
                            IWfiAnalysis anal = WfiAnalysis.CreateFromDBObject(m_cache, mb.OwnerHVO);
                            deletedObjectIDs.Add(anal.Hvo);
                            foreach (IWfiMorphBundle mbInner in anal.MorphBundlesOS)
                                deletedObjectIDs.Add(mbInner.Hvo);
                            anal.DeleteObjectSideEffects(objectsToDeleteAlso, state);
                        }
                    }
                }
                break;
            }
            case LexReference.kclsidLexReference:
            {
                if (objectsToDeleteAlso.Contains(loi.RelObjId))
                    break;
                LexReference lr = (LexReference)LexReference.CreateFromDBObject(m_cache, loi.RelObjId);
                // Delete the lexical relationship if it will be broken after removing this target.
                if (lr.IncompleteWithoutTarget(this.Hvo))
                    lr.DeleteObjectSideEffects(objectsToDeleteAlso, state);
                else
                    lr.TargetsRS.Remove(this.Hvo);
                break;
            }
        }
    }
    base.DeleteObjectSideEffects(objectsToDeleteAlso, state);
}
/// <summary>
/// Confirm that the set of cache rules in the given execution context includes a set of expected ones;
/// assertion failure if not.
/// </summary>
/// <param name="ctx"></param>
/// <param name="rules"></param>
void VerifyCacheRules(ExecutionContext ctx, IList rules)
{
    Set set = new Set();
    foreach (CacheRule rule in ctx.CacheRules)
        set.AddRange(rule.AllLeafRules);
    foreach (CacheRule r in rules)
    {
        Assert.IsTrue(set.Contains(r), "Unable to find expected CacheRule (" + r.Description + ")");
        set.Remove(r);
    }
}
/// <summary>
/// Side effects of deleting the underlying object. This is complicated by the existence of homograph
/// numbers.
/// </summary>
public override void DeleteObjectSideEffects(Set<int> objectsToDeleteAlso, ProgressState state)
{
    // Recursively call DeleteUnderlyingObject() on subsenses.
    List<LexSense> senses = new List<LexSense>();
    foreach (LexSense sense in SensesOS)
    {
        // Put them in a temporary holder,
        // since looping over the main prop isn't good
        // when they are being deleted.
        senses.Add(sense);
    }
    foreach (LexSense sense in senses)
        sense.DeleteObjectSideEffects(objectsToDeleteAlso, state);

    if (MorphoSyntaxAnalysisRAHvo > 0)
        MorphoSyntaxAnalysisRAHvo = 0; // This will allow the MSA to be deleted, if appropriate.

    // Deal with critical inbound references on entry and objects it owns.
    // The idea here is to delete any objects that refer to the entry,
    // but ONLY if those objects would then be invalid.
    // We call DeleteUnderlyingObject() directly on any high risk objects,
    // such as MSAs, MoForms, and senses, since the regular deletion SP would not delete
    // any other invalid objects.
    List<LinkedObjectInfo> linkedObjs = LinkedObjects;
    List<int> deletedObjectIDs = new List<int>();
    foreach (LinkedObjectInfo loi in linkedObjs)
    {
        if (loi.RelObjClass == WfiMorphBundle.kclsidWfiMorphBundle)
        {
            if (loi.RelObjField == (int)WfiMorphBundle.WfiMorphBundleTags.kflidSense)
            {
                if (!deletedObjectIDs.Contains(loi.RelObjId))
                {
                    IWfiMorphBundle mb = WfiMorphBundle.CreateFromDBObject(m_cache, loi.RelObjId);
                    if (!deletedObjectIDs.Contains(mb.OwnerHVO))
                    {
                        IWfiAnalysis anal = WfiAnalysis.CreateFromDBObject(m_cache, mb.OwnerHVO);
                        deletedObjectIDs.Add(anal.Hvo);
                        foreach (IWfiMorphBundle mbInner in anal.MorphBundlesOS)
                            deletedObjectIDs.Add(mbInner.Hvo);
                        anal.DeleteObjectSideEffects(objectsToDeleteAlso, state);
                    }
                }
            }
        }
        else if (loi.RelObjClass == (int)LexReference.kclsidLexReference
            && !objectsToDeleteAlso.Contains(loi.RelObjId))
        {
            LexReference lr = (LexReference)LexReference.CreateFromDBObject(m_cache, loi.RelObjId);
            // Delete the lexical relationship if it will be broken after removing this target.
            if (lr.IncompleteWithoutTarget(this.Hvo))
                lr.DeleteObjectSideEffects(objectsToDeleteAlso, state);
            else
                lr.TargetsRS.Remove(this.Hvo);
        }
    }
    base.DeleteObjectSideEffects(objectsToDeleteAlso, state);
}
private List<SpectrumSourceGroup> applyAssemblyText(ISession session, string filepath)
{
    var spectrumSources = session.Query<SpectrumSource>().ToList();
    var sourcesByGroup = new Map<string, List<SpectrumSource>>();
    var alreadyGroupedSources = new Set<string>();
    var sourceGroups = new List<SpectrumSourceGroup>();

    // open the assembly.txt file
    using (var assembleTxtFile = File.OpenText(filepath))
    {
        string line;
        while ((line = assembleTxtFile.ReadLine()) != null)
        {
            if (line.Length == 0)
                continue;

            try
            {
                Regex groupFilemaskPair = new Regex("((\"(.+)\")|(\\S+))\\s+((\"(.+)\")|(\\S+))");
                Match lineMatch = groupFilemaskPair.Match(line);
                string group = lineMatch.Groups[3].ToString() + lineMatch.Groups[4].ToString();
                string filemask = lineMatch.Groups[7].ToString() + lineMatch.Groups[8].ToString();

                // for wildcards, use old style behavior
                if (filemask.IndexOfAny("*?".ToCharArray()) > -1)
                {
                    if (!Path.IsPathRooted(filemask))
                        filemask = Path.Combine(Path.GetDirectoryName(filepath), filemask);

                    if (!sourcesByGroup.Contains(group))
                        sourcesByGroup[group] = new List<SpectrumSource>();

                    if (!Directory.Exists(Path.GetDirectoryName(filemask)))
                        continue;

                    var files = Directory.GetFiles(Path.GetDirectoryName(filemask), Path.GetFileName(filemask));
                    var sourceNames = files.Select(o => Path.GetFileNameWithoutExtension(o));
                    foreach (string sourceName in sourceNames)
                    {
                        var spectrumSource = spectrumSources.SingleOrDefault(o => o.Name == sourceName);
                        if (spectrumSource == null)
                            continue;

                        var insertResult = alreadyGroupedSources.Insert(sourceName);
                        if (insertResult.WasInserted)
                            sourcesByGroup[group].Add(spectrumSource);
                    }
                }
                else
                {
                    // otherwise, match directly to source names
                    string sourceName = Path.GetFileNameWithoutExtension(filemask);
                    var spectrumSource = spectrumSources.SingleOrDefault(o => o.Name == sourceName);
                    if (spectrumSource == null)
                        continue;

                    var insertResult = alreadyGroupedSources.Insert(sourceName);
                    if (insertResult.WasInserted)
                        sourcesByGroup[group].Add(spectrumSource);
                }
            }
            catch (Exception ex)
            {
                Program.HandleException(new Exception("Error reading assembly text from \"" + filepath + "\": " + ex.Message, ex));
            }
        }
    }

    // remove existing groups
    RemoveGroupNode(_rootNode, false);

    sourceGroups.Add(new SpectrumSourceGroup { Name = "/" });

    // build new group hierarchy
    foreach (var itr in sourcesByGroup)
    {
        if (itr.Value.IsNullOrEmpty())
            continue;

        var ssg = new SpectrumSourceGroup { Name = itr.Key };
        if (!alreadyGroupedSources.Contains(ssg.Name))
            sourceGroups.Add(ssg);

        // decompose group path into segments, e.g. /foo/bar/ -> {foo, bar}
        IEnumerable<string> segments = ssg.Name.Split("/".ToCharArray(), StringSplitOptions.RemoveEmptyEntries);
        segments = segments.Take(segments.Count() - 1); // ignore the last segment

        var parentNode = _rootNode;
        foreach (string segment in segments)
        {
            var segmentNode = parentNode.Children.FirstOrDefault(o => o.Text == segment);
            if (segmentNode == null)
            {
                var segmentGroup = new SpectrumSourceGroup { Name = (parentNode.Text + "/").Replace("//", "/") + segment };
                if (!alreadyGroupedSources.Contains(segmentGroup.Name))
                    sourceGroups.Add(segmentGroup);

                segmentNode = new tlvBranch
                {
                    Text = segment,
                    cms = cmRightClickGroupNode,
                    Parent = parentNode,
                    Children = new List<tlvBranch>(),
                    Data = segmentGroup
                };

                parentNode.Children.Add(segmentNode);
            }
            parentNode = segmentNode;
        }

        // parentNode is now the immediate parent of the current group
        var groupNode = new tlvBranch
        {
            Text = Path.GetFileName(ssg.Name),
            cms = cmRightClickGroupNode,
            Parent = parentNode,
            Children = new List<tlvBranch>(),
            Data = ssg
        };

        foreach (var source in itr.Value)
        {
            var sourceNode = new tlvBranch
            {
                Text = Path.GetFileName(source.Name),
                Parent = groupNode,
                Data = source,
                cms = cmRightClickFileNode
            };

            groupNode.Children.Add(sourceNode);
            lvNonGroupedFiles.Items.RemoveByKey(sourceNode.Text);
        }

        parentNode.Children.Add(groupNode);
    }

    tlvGroupedFiles.RefreshObject(_rootNode);
    tlvGroupedFiles.ExpandAll();

    return sourceGroups.ToList();
}
/// <summary>
/// This method is called by the ReversalEntriesText virtual handler when text may have changed in the
/// property, in order to update the actual list of reversal entries appropriately.
/// </summary>
/// <param name="tssVal">The new string.</param>
/// <param name="ws">The ws.</param>
public void CommitReversalEntriesText(ITsString tssVal, int ws)
{
    LexSenseReversalEntriesTextHandler vh = BaseVirtualHandler.GetInstalledHandler(m_cache,
        "LexSense", LexSenseReversalEntriesTextHandler.StandardFieldName) as LexSenseReversalEntriesTextHandler;
    Debug.Assert(vh != null, "The 'LexSenseReversalEntriesTextHandler' virtual handler has to be created at application startup now.");

    ITsString tssOld = vh.GetValue(m_hvo, ws);
    // The old and new values could be in another order, and this test won't catch that case.
    // That condition won't be fatal, however, so don't fret about it.
    if (tssOld.Equals(tssVal))
        return; // no change has occurred

    string val = tssVal.Text;
    if (val == null)
        val = ""; // This will effectively cause any extant entries for the given 'ws' to be removed in the end.

    StringCollection formsColl = new StringCollection();
    foreach (string form in val.Split(';'))
    {
        // These strings will be null, if there are two semi-colons together.
        // Or, it may be just whitespace, if it is '; ;'.
        if (form == null || form.Trim().Length == 0)
            continue;
        formsColl.Add(form.Trim());
    }
    int[] senseEntries = ReversalEntriesRC.HvoArray;
    int originalSenseEntriesCount = senseEntries.Length;
    int indexId;
    DbOps.ReadOneIntFromCommand(m_cache, "SELECT id FROM ReversalIndex WHERE WritingSystem=?", ws, out indexId);
    ReversalIndex revIndex;
    if (indexId == 0)
    {
        // Create the missing reversal index instead of crashing. See LT-10186.
        ILgWritingSystem lgws = LgWritingSystem.CreateFromDBObject(m_cache, ws);
        IReversalIndex newIdx = m_cache.LangProject.LexDbOA.ReversalIndexesOC.Add(new ReversalIndex());
        newIdx.WritingSystemRA = lgws;
        // Copy any and all alternatives from lgws.Name to newIdx.Name
        foreach (ILgWritingSystem lgwsLoop in m_cache.LanguageEncodings)
        {
            string lgsNameAlt = lgws.Name.GetAlternative(lgwsLoop.Hvo);
            if (lgsNameAlt != null && lgsNameAlt.Length > 0)
                newIdx.Name.SetAlternative(lgsNameAlt, lgws.Hvo);
        }
        revIndex = (ReversalIndex)newIdx;
    }
    else
    {
        revIndex = (ReversalIndex)CmObject.CreateFromDBObject(m_cache, indexId, false);
    }

    // We need the list of ReversalIndexEntries that this sense references, but which belong
    // to another reversal index. Those hvos, plus any entry hvos from the given 'ws' that are reused,
    // get put into 'survivingEntries'.
    Set<int> survivingEntries = new Set<int>(originalSenseEntriesCount + formsColl.Count);
    // 'entriesNeedingPropChangeBackRef' will hold the hvos of all ReversalIndexEntry objects that need to have
    // their 'ReferringSenses' virtual property (re)computed.
    // Any reversal index entry that gains or loses a reference will need this (re)computing.
    List<int> entriesNeedingPropChangeBackRef = new List<int>(originalSenseEntriesCount + formsColl.Count);
    foreach (int entryHvo in senseEntries)
    {
        // Use 'cheapo' FDO object maker, since it is supposed to all be in the cache already.
        ReversalIndexEntry rie = (ReversalIndexEntry)CmObject.CreateFromDBObject(m_cache, entryHvo, false);
        int wsIndex = 0;
        int hvoIndex = m_cache.GetOwnerOfObjectOfClass(rie.Hvo, ReversalIndex.kclsidReversalIndex);
        if (hvoIndex != 0)
            wsIndex = m_cache.GetIntProperty(hvoIndex, (int)ReversalIndex.ReversalIndexTags.kflidWritingSystem);
        if (wsIndex == ws)
        {
            string form = rie.LongName;
            if (formsColl.Contains(form))
            {
                // Recycling an entry.
                survivingEntries.Add(rie.Hvo);
                formsColl.Remove(form); // Don't need to mess with it later on.
            }
            else
            {
                // It is being removed from the extant reference property,
                // so needs to recompute its back ref virtual handler.
                entriesNeedingPropChangeBackRef.Add(rie.Hvo);
            }
        }
        else
        {
            // These are all in some other ws, so they certainly must survive (cf. LT-3391).
            // Any entries that are reused will get added to this array later on.
            survivingEntries.Add(rie.Hvo);
        }
    }

    // Start undoable section of code.
    m_cache.BeginUndoTask(Strings.ksUndoMakeRevEntries, Strings.ksRedoMakeRevEntries);
    ISilDataAccess sda = m_cache.MainCacheAccessor;
    IActionHandler acth = sda.GetActionHandler();
    try
    {
        // Add undo actions to reload the virtual handler and send prop changes to update displays.
        if (acth != null)
        {
            List<PropChangedInfo> pciList = new List<PropChangedInfo>();
            pciList.Add(new PropChangedInfo(m_hvo, vh.Tag, ws, 0, 0));
            acth.AddAction(new PropChangedUndoAction(m_cache, true, PropChangeType.kpctNotifyAll, pciList));
            acth.AddAction(new ReloadVirtualHandlerUndoAction(m_cache, true, vh, m_hvo, vh.Tag, ws));
        }
        int cOldEntries = revIndex.EntriesOC.Count;
        foreach (string currentForm in formsColl)
        {
            int idRevEntry = revIndex.FindOrCreateReversalEntry(currentForm);
            entriesNeedingPropChangeBackRef.Add(idRevEntry);
            survivingEntries.Add(idRevEntry);
        }

        // Notify everyone, and his brother, about the changes done here.
        // PropChanged (1 of 3) Main: Replace main sense property with current set of entries.
        sda.Replace(m_hvo, (int)LexSense.LexSenseTags.kflidReversalEntries, 0, originalSenseEntriesCount,
            survivingEntries.ToArray(), survivingEntries.Count);
        sda.PropChanged(null, (int)PropChangeType.kpctNotifyAll, m_hvo,
            (int)LexSense.LexSenseTags.kflidReversalEntries, 0, survivingEntries.Count, originalSenseEntriesCount);

        // Remove entries from the index that are no longer valid.
        foreach (int rieHvo in senseEntries)
        {
            if (!survivingEntries.Contains(rieHvo))
            {
                // The entry is no longer a reversal entry for this sense.
                ReversalIndexEntry rie = new ReversalIndexEntry(m_cache, rieHvo);
                if (rie.SenseIds.Count == 0) // The entry is no longer a reversal entry for any sense.
                    revIndex.EntriesOC.Remove(rie);
            }
        }

        // PropChanged (2 of 3) Affected Entries: (Re)compute
        // the virtual property of select reversal index entries.
        ReversalIndexEntry.ResetReferringSenses(m_cache, entriesNeedingPropChangeBackRef);

        // PropChanged (3 of 3) Index Entries: Simulate a complete replacement of the entries collection,
        // BUT only if new entries were added in this method.
        int cNewEntries = revIndex.EntriesOC.Count;
        if (cNewEntries > cOldEntries)
        {
            sda.PropChanged(null, (int)PropChangeType.kpctNotifyAll, indexId,
                (int)ReversalIndex.ReversalIndexTags.kflidEntries, 0, cNewEntries, cOldEntries);
        }

        // Add redo actions to reload the virtual handler and send prop changes to update displays.
        if (acth != null)
        {
            acth.AddAction(new ReloadVirtualHandlerUndoAction(m_cache, false, vh, m_hvo, vh.Tag, ws));
            List<PropChangedInfo> pciList = new List<PropChangedInfo>();
            pciList.Add(new PropChangedInfo(m_hvo, vh.Tag, ws, 0, 0));
            acth.AddAction(new PropChangedUndoAction(m_cache, false, PropChangeType.kpctNotifyAll, pciList));
        }
    }
    finally
    {
        if (acth != null && Marshal.IsComObject(acth))
            Marshal.ReleaseComObject(acth);
    }
    // End undoable section of code.
    m_cache.EndUndoTask();
}
private bool IsItemEligible(ISilDataAccess sda, int hvo, Set<int> possiblePOS)
{
    bool fEnable = false;
    int hvoMsa = sda.get_ObjectProp(hvo, (int)LexSense.LexSenseTags.kflidMorphoSyntaxAnalysis);
    if (hvoMsa != 0)
    {
        int clsid = m_cache.GetClassOfObject(hvoMsa);
        if (clsid == MoStemMsa.kClassId)
        {
            int pos = sda.get_ObjectProp(hvoMsa, (int)MoStemMsa.MoStemMsaTags.kflidPartOfSpeech);
            if (pos != 0 && possiblePOS.Contains(pos))
            {
                // Only show it as a change if it is different.
                int hvoFeature = sda.get_ObjectProp(hvoMsa, (int)MoStemMsa.MoStemMsaTags.kflidMsFeatures);
                fEnable = hvoFeature != m_selectedHvo;
            }
        }
    }
    return fEnable;
}
private static Set<NamedWritingSystem> GetNamedWritingSystemsFromLDFs(ILgWritingSystemFactory wsf,
    string[] fileList, Set<NamedWritingSystem> namedWritingSystems)
{
    Set<string> names = new Set<string>();
    foreach (NamedWritingSystem nws in namedWritingSystems)
        names.Add(nws.IcuLocale);
    // Now add the ones from the XML files.
    foreach (string pathname in fileList)
    {
        string[] bits = pathname.Split('\\');
        string filename = bits[bits.Length - 1];
        bits = filename.Split('.');
        string icuLocale = bits[0]; // Name up to first '.'.
        // The first test excludes names like en.xml1.
        if (bits[1] == "xml" && !names.Contains(icuLocale.ToLowerInvariant()))
        {
            try
            {
                // This will get the language name from the XML language def. file. This
                // should be the same name the user chose to call the language when creating
                // its writing system.
                LanguageDefinitionFactory ldf = new LanguageDefinitionFactory(wsf, icuLocale);
                if (ldf.LanguageDefinition == null)
                {
                    System.Diagnostics.Debug.WriteLine("The XML file for " + icuLocale + " did not parse properly.");
                }
                else
                {
                    string displayName = ldf.LanguageDefinition.DisplayName;
                    // REVIEW: These two lines are how we used to get the display name. Now we read
                    // it from the language def. file (i.e. the .xml file). Will the name from the
                    // XML file always be in the display locale?
                    //Icu.UErrorCode err;
                    //Icu.GetDisplayName(icuLocale, displayLocale, out displayName, out err);
                    // If it can't find a name, the normal behavior is to return the icuLocale.
                    // If that happens we leave this one out.
                    // If anything worse happens (e.g., that might produce a bad error code),
                    // the other checks we make here should detect it.
                    if (displayName != null && displayName != icuLocale && displayName.Length != 0)
                        namedWritingSystems.Add(new NamedWritingSystem(displayName, icuLocale));
                }
            }
            catch (FileNotFoundException e)
            {
                System.Diagnostics.Debug.WriteLine(e.Message);
                // LanguageDefinitionFactory can throw this error. Just ignore it.
            }
        }
    }
    return namedWritingSystems;
}
/// <summary>
/// If any of the nodes in treeNodeCollection has a tag that is an int that is in the set,
/// expand it, and recursively check its children.
/// </summary>
/// <param name="treeNodeCollection"></param>
/// <param name="expandedItems"></param>
private void ExpandItems(TreeNodeCollection treeNodeCollection, Set<int> expandedItems)
{
    foreach (TreeNode node in treeNodeCollection)
    {
        if (node.Tag is int && expandedItems.Contains((int)node.Tag))
        {
            node.Expand();
            ExpandItems(node.Nodes, expandedItems);
        }
    }
}
/// <summary>
/// On Refresh, we want to reload the XML configuration files. This greatly facilitates developing
/// those files, even though it's not as useful for normal use. It might prove useful whenever we
/// get around to allowing user customization (or it might not).
/// </summary>
/// <param name="sender"></param>
/// <returns></returns>
public bool OnRefresh(object sender)
{
    CheckDisposed();
    Set<string> setDatabases = new Set<string>();
    foreach (FwXWindow wnd in m_rgMainWindows)
    {
        string sDatabase = wnd.Cache.DatabaseName;
        if (setDatabases.Contains(sDatabase))
            continue;
        setDatabases.Add(sDatabase);
        Inventory.GetInventory("layouts", sDatabase).ReloadIfChanges();
        Inventory.GetInventory("parts", sDatabase).ReloadIfChanges();
    }
    return false;
}
/// <summary>
/// Given the mapping and the marker-following information from the file, calculate how many texts
/// will result from an import.
/// </summary>
/// <param name="mMappings"></param>
/// <param name="dictionary"></param>
/// <returns></returns>
private int CalculateTextCount(List<InterlinearMapping> mMappings, Dictionary<string, Dictionary<string, int>> dictionary)
{
    int count = 0;
    Set<string> headers = new Set<string>();
    foreach (InterlinearMapping interlinearMapping in mMappings)
    {
        if (interlinearMapping.Destination == InterlinDestination.Id
            || interlinearMapping.Destination == InterlinDestination.Source
            || interlinearMapping.Destination == InterlinDestination.Comment
            || interlinearMapping.Destination == InterlinDestination.Title
            || interlinearMapping.Destination == InterlinDestination.Abbreviation)
        {
            headers.Add(interlinearMapping.Marker);
        }
    }
    // If no headers were mapped then only one text could result (and 0 would be counted).
    if (headers.Count == 0)
        return 1;
    // Iterate through the data of markers and the counts of markers that follow them.
    foreach (KeyValuePair<string, Dictionary<string, int>> markerAndFollowing in dictionary)
    {
        // If the marker is a header...
        if (headers.Contains(markerAndFollowing.Key))
        {
            // Every time a header marker is followed by a non-header, it is the start of a text.
            // For every non-header that follows a header marker, add the occurrence count to count.
            foreach (var followingMarker in markerAndFollowing.Value)
            {
                if (!headers.Contains(followingMarker.Key))
                {
                    count += followingMarker.Value;
                }
            }
        }
    }
    return count;
}
static void CheckModifiedFiles ()
{
    // Check databases following a bottom-up strategy in the dependency
    // tree. This will help resolving parsed classes.

    Set<ProjectDom> list = new Set<ProjectDom> ();
    lock (databases) {
        // There may be several uris for the same db
        foreach (ProjectDom ob in databases.Values)
            list.Add (ob);
    }

    Set<ProjectDom> done = new Set<ProjectDom> ();
    while (list.Count > 0) {
        ProjectDom readydb = null;
        ProjectDom bestdb = null;
        int bestRefCount = int.MaxValue;

        // Look for a db with all references resolved
        foreach (ProjectDom db in list) {
            bool allDone = true;
            foreach (ProjectDom refdb in db.References) {
                if (!done.Contains (refdb)) {
                    allDone = false;
                    break;
                }
            }
            if (allDone) {
                readydb = db;
                break;
            } else if (db.References.Count < bestRefCount) {
                bestdb = db;
                bestRefCount = db.References.Count;
                break;
            }
        }

        // It may not find any db without resolved references if there
        // are circular dependencies. In this case, take the one with
        // fewer references.
        if (readydb == null)
            readydb = bestdb;

        readydb.CheckModifiedFiles ();
        list.Remove (readydb);
        done.Add (readydb);
    }
}
void ProcessNodesAddRemove(Set<Node> requiredNodes, Set<Node> existingNodes)
{
    lock (this)
    {
        // Hide nodes that exist but are no longer required.
        foreach (var node in existingNodes.Where(node => !requiredNodes.Contains(node)))
            HideVNode(node);
        // Unhide nodes that are required but not yet shown.
        foreach (var node in requiredNodes.Where(node => !existingNodes.Contains(node)))
            UnhideVNode(node);
    }
}
private void CollectUnreachableTypes(
    Set<EntityType> reachableTypes,
    out KeyToListMap<EntityType, LineInfo> entityTypes,
    out KeyToListMap<EntityType, LineInfo> isTypeOfEntityTypes)
{
    // Collect line infos for types in violation
    entityTypes = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);
    isTypeOfEntityTypes = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);

    if (reachableTypes.Count == MappedEntityTypes.Count)
    {
        // All types are reachable; nothing to check
        return;
    }

    // Find IsTypeOf mappings where no type in the hierarchy can generate a row
    foreach (var isTypeOf in m_isTypeOfLineInfos.Keys)
    {
        if (!MetadataHelper
                 .GetTypeAndSubtypesOf(isTypeOf, m_itemCollection, false)
                 .Cast<EntityType>()
                 .Intersect(reachableTypes)
                 .Any())
        {
            // no type in the hierarchy is reachable...
            isTypeOfEntityTypes.AddRange(isTypeOf, m_isTypeOfLineInfos.EnumerateValues(isTypeOf));
        }
    }

    // Find explicit types not generating a value
    foreach (var entityType in m_entityTypeLineInfos.Keys)
    {
        if (!reachableTypes.Contains(entityType))
        {
            entityTypes.AddRange(entityType, m_entityTypeLineInfos.EnumerateValues(entityType));
        }
    }
}
void InvalidateNodesOfRailGraph(Set<Node> nodesFromVectorTiles)
{
    double zf = ZoomFactor;
    foreach (var o in _drawingObjectsToIViewerObjects.Values)
    {
        var vNode = o as GraphmapsNode;
        if (vNode == null)
            continue;

        vNode.InvalidateNodeDot(NodeDotWidth);
        if (vNode.LgNodeInfo == null)
            continue;

        ArrangeNodeLabel(vNode, zf);
        if (nodesFromVectorTiles.Contains(vNode.Node))
            SetupTileNode(vNode);
        vNode.Node.Attr.LineWidth = GetBorderPathThickness();

        double nodeLabelHeight = _lgLayoutSettings.NodeLabelHeightInInches * DpiY / CurrentScale;
        double nodeLabelWidth = nodeLabelHeight * vNode.LgNodeInfo.LabelWidthToHeightRatio;

        if (vNode.LgNodeInfo.LabelVisibleFromScale >= 0 && vNode.LgNodeInfo.LabelVisibleFromScale <= zf)
        {
            var offset = Point.Scale(nodeLabelWidth + NodeDotWidth * 1.01, nodeLabelHeight + NodeDotWidth * 1.01,
                vNode.LgNodeInfo.LabelOffset);
            vNode.InvalidateNodeLabel(nodeLabelHeight, nodeLabelWidth, offset);
        }
        else if (_lgLayoutSettings.Interactor.SelectedNodeLabels.ContainsKey(vNode.LgNodeInfo))
        {
            var pos = _lgLayoutSettings.Interactor.SelectedNodeLabels[vNode.LgNodeInfo];
            var offset = Point.Scale(nodeLabelWidth + NodeDotWidth * 1.01, nodeLabelHeight + NodeDotWidth * 1.01,
                LgNodeInfo.GetLabelOffset(pos));
            vNode.InvalidateNodeLabel(nodeLabelHeight, nodeLabelWidth, offset);
        }
        else
        {
            vNode.HideNodeLabel();
        }
    }
}