/// <summary>
/// Computes the set union of this set and <paramref name="a"/>: the result contains every
/// element present in either set. Neither this set nor <paramref name="a"/> is modified;
/// the result is a <c>Clone()</c> of this set with the extra elements added.
/// </summary>
/// <param name="a">A collection of elements; may be <c>null</c>, in which case the result is simply a clone of this set.</param>
/// <returns>A new <c>Set</c> containing the union of this <c>Set</c> with the specified collection.</returns>
public Set Union(Set a)
{
    Set union = (Set)Clone();
    if (a == null)
        return union;
    union.AddAll(a);
    return union;
}
/// <summary>
/// Refines this tree with an additional property test: each node's state set is split into
/// the states where <paramref name="prop"/> is enabled and those where it is not, the split is
/// pushed down to existing children, and leaves grow a true/false child pair for the new property.
/// The tree is modified in place.
/// </summary>
/// <param name="prop">Property to refine by.</param>
/// <param name="enabledStates">States in which <paramref name="prop"/> holds.</param>
/// <returns>This tree (mutated in place), for chaining.</returns>
public BinaryDecisionTree Refine(string prop, Set<int> enabledStates)
{
    // Don't refine with a property this node already tests.
    if (this.property.Equals(prop))
        return this;
    // Nothing to split if this node covers no states.
    if (states.IsEmpty)
        return this;
    // Partition this node's states by whether the property is enabled.
    Set<int> s1 = states.Intersect(enabledStates);
    Set<int> s2 = states.Difference(enabledStates);
    // Push the refinement down to existing children; Refine mutates in place,
    // so the returned reference can be ignored here.
    // NOTE(review): a node with exactly one child refines only that side and never
    // grows the missing child — confirm such lopsided nodes cannot occur.
    if (this.trueEdge != null)
        this.trueEdge.Refine(prop, s1);
    if (this.falseEdge != null)
        this.falseEdge.Refine(prop, s2);
    // Leaf: materialize the split as a fresh true/false child pair.
    // (Dead commented-out code that skipped empty partitions was removed; both
    // children are created unconditionally, as the live code always did.)
    if (this.trueEdge == null && this.falseEdge == null)
    {
        this.trueEdge = new BinaryDecisionTree(prop, null, null, this.maxValue, this.minValue, s1);
        this.falseEdge = new BinaryDecisionTree(prop, null, null, this.maxValue, this.minValue, s2);
    }
    return this;
}
/// <summary>
/// Initializes a new <c>CometServer</c> with empty client, path-index, username-index
/// and request-path registries.
/// </summary>
public CometServer()
{
    this._clients = new Dictionary<string, CometClient>();
    this._pathIndex = new Dictionary<string, List<CometClient>>();
    this._usernameIndex = new Dictionary<string, List<CometClient>>();
    this._requestPaths = new Set<string>();
}
/// <summary>
/// Constructs a thread-safe <c>Set</c> wrapper.
/// </summary>
/// <param name="basisSet">The <c>Set</c> object that this object will wrap.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="basisSet"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="basisSet"/> returns a null <c>SyncRoot</c>.</exception>
public SynchronizedSet(Set basisSet)
{
    // FIX: previously a null basisSet produced an unhelpful NullReferenceException on the
    // SyncRoot access, and a null SyncRoot was reported by throwing NullReferenceException
    // directly (CA2201: reserved exception type). Argument exceptions are thrown instead.
    if (basisSet == null)
        throw new ArgumentNullException("basisSet");
    _syncRoot = basisSet.SyncRoot;
    if (_syncRoot == null)
        throw new ArgumentException("The Set you specified returned a null SyncRoot.", "basisSet");
    _basisSet = basisSet;
}
/// <summary>
/// Returns all Edges that connect the two nodes (which are assumed to be different).
/// </summary>
/// <param name="node0">First node.</param>
/// <param name="node1">Second node.</param>
/// <returns>An <c>ArrayList</c> of the remaining <c>DirectedEdge</c>s.</returns>
public static IList getEdgesBetween(Node node0, Node node1)
{
    // Start from every edge leaving node0.
    IList outEdges0 = DirectedEdge.ToEdges(node0.OutEdges.Edges);
    Set<DirectedEdge> result = new Set<DirectedEdge>(outEdges0.Cast<DirectedEdge>());
    // Remove node1's out-edges from that set.
    // NOTE(review): JTS's getEdgesBetween *retains* the common edges instead of removing
    // them — confirm RemoveMany is the intended operation for this port's Set semantics.
    IList outEdges1 = DirectedEdge.ToEdges(node1.OutEdges.Edges);
    result.RemoveMany(outEdges1.Cast<DirectedEdge>());
    return new ArrayList(result);
}
/// <summary>
/// Builds a decision-tree node testing the given property over the given state set.
/// </summary>
/// <param name="prop">Property tested at this node.</param>
/// <param name="t">Subtree followed when the property holds (null for a leaf).</param>
/// <param name="f">Subtree followed when the property does not hold (null for a leaf).</param>
/// <param name="maxv">Value stored in this node's <c>maxValue</c> field.</param>
/// <param name="minv">Value stored in this node's <c>minValue</c> field.</param>
/// <param name="setStates">States covered by this node.</param>
public BinaryDecisionTree(string prop, BinaryDecisionTree t, BinaryDecisionTree f, double maxv, double minv, Set<int> setStates)
{
    property = prop;
    states = setStates;
    trueEdge = t;
    falseEdge = f;
    maxValue = maxv;
    minValue = minv;
}
/// <summary>Create the Set just once: double-checked under a lock so concurrent callers
/// initialize <c>msgs</c> from <c>s_seqMessages</c> at most one time.</summary>
// NOTE(review): locking on typeof(MessageSequencer) is a known anti-pattern — any code in the
// process can lock the same Type object, risking contention or deadlock. A private static
// readonly gate object would be preferable, but other code may rely on sharing this lock,
// so the lock target is left unchanged here.
private static void CreateSet()
{
    lock(typeof(MessageSequencer))
    {
        // Another thread may have finished initialization while we waited for the lock.
        if (msgs != null)
            return;
        msgs = new Set<int>(s_seqMessages);
    }
}
/// <summary>
/// Collects every edge of the newly added triangles that is missing a neighboring triangle
/// on one side, and adds each such edge to the front.
/// </summary>
void UpdateFront()
{
    var frontCandidates = new Set<CdtEdge>();
    foreach (var triangle in addedTriangles)
    {
        foreach (var edge in triangle.Edges)
        {
            // An edge with no triangle on its clockwise or counterclockwise side is exposed.
            if (edge.CwTriangle == null || edge.CcwTriangle == null)
                frontCandidates.Insert(edge);
        }
    }
    foreach (var edge in frontCandidates)
        AddEdgeToFront(edge);
}
/// <summary>
/// Verifies that the difference of <paramref name="setFirst"/> and <paramref name="setSecond"/>
/// enumerates exactly the same elements, in the same order, as <paramref name="resultSet"/>.
/// </summary>
/// <param name="setFirst">Left operand of the difference.</param>
/// <param name="setSecond">Right operand of the difference.</param>
/// <param name="resultSet">Expected elements of the difference.</param>
public void TestSetDifference(Set<string> setFirst, Set<string> setSecond, Set<string> resultSet)
{
    // FIX: the return value of Difference() was previously discarded and the unmodified
    // setFirst was compared against resultSet. Difference() returns the resulting set
    // (see its other call sites, e.g. curDrafts.Difference(origDrafts)).
    Set<string> difference = setFirst.Difference(setSecond);
    var differenceIterator = difference.GetEnumerator();
    var resultIterator = resultSet.GetEnumerator();
    while (true)
    {
        bool hasActual = differenceIterator.MoveNext();
        bool hasExpected = resultIterator.MoveNext();
        // FIX: also assert that both sequences end together; the old loop silently
        // ignored extra elements in either set.
        Assert.AreEqual(hasExpected, hasActual);
        if (!hasActual || !hasExpected)
            break;
        Assert.AreEqual(differenceIterator.Current, resultIterator.Current);
    }
}
/// <summary>
/// Entry point: collects values from the command line into a stack, then builds and
/// prints a plain Set and a SortedSet from those values.
/// </summary>
static void Main(string[] args)
{
    var stackOfValues = new Stack<string>();
    GetInitialValuesFromArgs(args, ref stackOfValues);
    // Plain set built from the stack contents.
    var plainSet = new Set<string>(stackOfValues.ToArray());
    Console.WriteLine(plainSet.ToString());
    // Sorted set built from the same stack contents.
    var sortedSet = new SortedSet(stackOfValues.ToArray());
    Console.WriteLine(sortedSet.ToString());
    // Keep the console window open until a key is pressed.
    Console.ReadKey();
}
/// <summary>
/// Returns true when, accumulating node counts across all given parent edges, the active
/// node set never exceeds <paramref name="threshold"/>; returns false as soon as it does.
/// </summary>
internal static bool NumberOfActiveNodesIsUnderThreshold(List<Edge> inParentEdges, List<Edge> outParentEdges, int threshold)
{
    // Shared accumulator so nodes are counted once across both edge lists.
    var countedNodes = new Set<Node>();
    foreach (var inEdge in inParentEdges)
    {
        // For in-edges the cluster is the edge target and the other endpoint is the source.
        if (SetOfActiveNodesIsLargerThanThreshold((Cluster)inEdge.Target, inEdge.Source, countedNodes, threshold))
            return false;
    }
    foreach (var outEdge in outParentEdges)
    {
        // For out-edges the cluster is the edge source and the other endpoint is the target.
        if (SetOfActiveNodesIsLargerThanThreshold((Cluster)outEdge.Source, outEdge.Target, countedNodes, threshold))
            return false;
    }
    return true;
}
/// <summary>
/// A MultiPoint is simple if it has no repeated points.
/// </summary>
public bool IsSimple(IMultiPoint mp)
{
    if (mp.IsEmpty)
        return true;
    // Track coordinates seen so far; any repeat makes the geometry non-simple.
    Set<ICoordinate> seen = new Set<ICoordinate>();
    for (int i = 0; i < mp.NumGeometries; i++)
    {
        IPoint point = (IPoint)mp.GetGeometryN(i);
        ICoordinate coordinate = point.Coordinate;
        if (seen.Contains(coordinate))
            return false;
        seen.Add(coordinate);
    }
    return true;
}
/// <summary>
/// Select an action that is enabled in the current state
/// and whose action symbol is in the set <paramref name="actionSymbols"/>.
/// Use coverage points and reward policy.
/// </summary>
/// <param name="actionSymbols">set of candidate action symbols</param>
/// <returns>the chosen action or null if no choice is possible</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="actionSymbols"/> is null.</exception>
public override Action SelectAction(Set<Symbol> actionSymbols)
{
    if (actionSymbols == null)
        throw new ArgumentNullException("actionSymbols");
    // No candidate symbols means no possible choice.
    if (actionSymbols.IsEmpty)
        return null;
    Sequence<Action> enabledActions = new Sequence<Action>(this.GetEnabledActions(actionSymbols));
    if (enabledActions.IsEmpty)
        return null;
    // Delegate the actual choice (coverage points, reward policy) to ChooseAction.
    return ChooseAction(enabledActions, this.CurrentState);
}
/// <summary>
/// Intersecting the multiples of two with the multiples of three (both below 16)
/// must yield exactly the multiples of six: 0, 6 and 12.
/// </summary>
public void IntersectionTest()
{
    Set<int> evens = new Set<int>();
    for (int value = 0; value < 16; value += 2)
        evens.Add(value);
    Set<int> triples = new Set<int>();
    for (int value = 0; value < 16; value += 3)
        triples.Add(value);
    Set<int> intersection = evens.Intersect(triples);
    Assert.AreEqual(3, intersection.Count, "A01");
    Assert.AreEqual(0, intersection[0], "A02");
    Assert.AreEqual(6, intersection[1], "A03");
    Assert.AreEqual(12, intersection[2], "A04");
}
/// <summary>
/// Adding elements through the + operator and then removing the same elements through
/// the - operator must leave both sets empty.
/// </summary>
public void minusTest()
{
    Collections.Set<int> first = new Collections.Set<int>();
    Collections.Set<int> second = new Collections.Set<int>();
    int[] valuesA = { 1, 5, 6, 9 };
    int[] valuesB = { 1, 4, 7, 9 };
    // Populate both sets element by element via the + operator.
    for (int i = 0; i < valuesA.Length; i++)
    {
        first += (valuesA[i]);
        second += (valuesB[i]);
    }
    // Remove everything again via the - operator.
    for (int i = 0; i < valuesA.Length; i++)
    {
        first -= (valuesA[i]);
        second -= (valuesB[i]);
    }
    Assert.AreEqual(0, first.size());
    Assert.AreEqual(0, second.size());
}
/// <summary>
/// Exercises the Set constructor overloads (default, capacity, Empty, array copy,
/// Set copy, IEnumerable copy) and verifies the resulting element counts.
/// </summary>
public void ConstructionTests()
{
    Set<int> set = new Set<int>();
    Assert.AreEqual(0, set.Count, "Set should be empty at first.");
    set = new Set<int>(0);
    Assert.AreEqual(0, set.Count, "Set should be empty at first.");
    set = Set<int>.Empty;
    Assert.AreEqual(0, set.Count, "Set should be empty at first.");
    // FIX: assertion messages previously read "Set should two itmes in it."
    int[] ints = new int[] { 1, 2 };
    set = new Set<int>(ints);
    Assert.AreEqual(2, set.Count, "Set should have two items in it.");
    Set<int> set2 = Create(1, 2);
    set = new Set<int>(set2);
    Assert.AreEqual(2, set.Count, "Set should have two items in it.");
    set = new Set<int>(set2 as IEnumerable<int>);
    Assert.AreEqual(2, set.Count, "Set should have two items in it.");
}
/// <summary>
/// Populates the writing-system combo box with the active analysis writing systems that
/// do not already have a reversal index, and preselects a sensible default.
/// </summary>
/// <param name="cache">The FDO cache to read language-project data from.</param>
/// <param name="enableCancel">Whether the Cancel button is shown.</param>
/// <exception cref="ApplicationException">Thrown when Cancel is disabled and there are no writing systems to choose.</exception>
public void Init(FdoCache cache, bool enableCancel)
{
    CheckDisposed();
    m_cache = cache;
    m_btnCancel.Visible = enableCancel;
    // Collect the writing systems that already have a reversal index.
    Set<int> revIdxWs = new Set<int>(4);
    foreach (IReversalIndex ri in cache.LangProject.LexDbOA.ReversalIndexesOC)
        revIdxWs.Add(ri.WritingSystemRAHvo);
    // Include only the analysis writing systems chosen by the user. See LT-7514 and LT-7239.
    Set<int> activeWs = new Set<int>(8);
    foreach (int ws in cache.LangProject.AnalysisWssRC.HvoArray)
        activeWs.Add(ws);
    m_cbWritingSystems.Sorted = true;
    m_cbWritingSystems.DisplayMember = "Name";
    NamedWritingSystem nwsSelected = null;
    foreach (NamedWritingSystem nws in cache.LangProject.GetDbNamedWritingSystems())
    {
        // A writing system that already has a reversal index is recorded but not offered.
        if (revIdxWs.Contains(nws.Hvo))
        {
            AddLanguageForExistingRevIdx(nws.IcuLocale);
            continue;
        }
        // Skip writing systems the user has not marked active for analysis.
        if (!activeWs.Contains(nws.Hvo))
            continue;
        m_cbWritingSystems.Items.Add(nws);
        // Preselect the first item whose language does not match an existing reversal index.
        if (nwsSelected == null && !LanguageMatchesExistingRevIdx(nws.IcuLocale))
            nwsSelected = nws;
    }
    if (nwsSelected != null)
        m_cbWritingSystems.SelectedItem = nwsSelected;
    // Fall back to the first item if nothing was preselected.
    if (m_cbWritingSystems.Items.Count > 0 && m_cbWritingSystems.SelectedIndex < 0)
        m_cbWritingSystems.SelectedIndex = 0;
    if (!enableCancel && m_cbWritingSystems.Items.Count == 0)
        throw new ApplicationException("Cancel is disabled, but there are none to choose, so the user has no way to get out of this dialog.");
}
/// <summary>
/// The &amp; operator on two sets must yield exactly the elements they share (here 1 and 9).
/// </summary>
public void intersectTest()
{
    Collections.Set<int> first = new Collections.Set<int>();
    Collections.Set<int> second = new Collections.Set<int>();
    int[] valuesA = { 1, 5, 6, 9 };
    int[] valuesB = { 1, 4, 7, 9 };
    for (int i = 0; i < valuesA.Length; i++)
    {
        first += (valuesA[i]);
        second += (valuesB[i]);
    }
    int[] expected = { 1, 9 };
    Collections.Set<int> intersection = (first & second);
    // Copy to an array and sort, since the set makes no ordering guarantee.
    int[] actual = new int[intersection.size()];
    for (int i = 0; i < intersection.size(); i++)
        actual[i] = intersection.get(i);
    Array.Sort(actual);
    CollectionAssert.AreEqual(expected, actual);
}
/// <summary>
/// Performs the bulk edit on the given items, reporting progress as it goes.
/// </summary>
/// <param name="itemsToChange">The items to change (object HVOs in the overrides visible in this file).</param>
/// <param name="state">Progress indicator to update while working.</param>
public abstract void DoIt(Set<int> itemsToChange, ProgressState state);
/// <summary>
/// Collects line information for entity types that cannot be produced by any row, i.e. are
/// not in <paramref name="reachableTypes"/>, split into explicitly mapped types and IsTypeOf
/// mappings whose entire hierarchy is unreachable.
/// </summary>
/// <param name="reachableTypes">Types known to be reachable.</param>
/// <param name="entityTypes">Out: line infos for unreachable explicitly-mapped types.</param>
/// <param name="isTypeOfEntityTypes">Out: line infos for IsTypeOf mappings with no reachable type in their hierarchy.</param>
private void CollectUnreachableTypes(
    Set<EntityType> reachableTypes, out KeyToListMap<EntityType, LineInfo> entityTypes,
    out KeyToListMap<EntityType, LineInfo> isTypeOfEntityTypes)
{
    // Collect line infos for types in violation
    entityTypes = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);
    isTypeOfEntityTypes = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);
    if (reachableTypes.Count == MappedEntityTypes.Count)
    {
        // All types are reachable; nothing to check
        return;
    }
    // Find IsTypeOf mappings where no type in hierarchy can generate a row
    foreach (var isTypeOf in m_isTypeOfLineInfos.Keys)
    {
        if (!MetadataHelper.GetTypeAndSubtypesOf(isTypeOf, m_itemCollection, false)
                 .Cast<EntityType>()
                 .Intersect(reachableTypes)
                 .Any())
        {
            // no type in the hierarchy is reachable...
            isTypeOfEntityTypes.AddRange(isTypeOf, m_isTypeOfLineInfos.EnumerateValues(isTypeOf));
        }
    }
    // Find explicit types not generating a value
    foreach (var entityType in m_entityTypeLineInfos.Keys)
    {
        if (!reachableTypes.Contains(entityType))
        {
            entityTypes.AddRange(entityType, m_entityTypeLineInfos.EnumerateValues(entityType));
        }
    }
}
/// <summary>
/// Determines which types are produced by this mapping.
/// A type is unambiguously reachable when its candidate condition is satisfiable and shares
/// no satisfying assignment with any other type's candidate condition.
/// </summary>
/// <param name="converter">Conversion context providing the boolean solver.</param>
/// <param name="mappingConditions">One solver vertex per normalized entity type mapping.</param>
/// <returns>The set of unambiguously reachable mapped entity types.</returns>
private Set<EntityType> FindUnambiguouslyReachableTypes(
    DomainConstraintConversionContext<string, ValueCondition> converter,
    Vertex[] mappingConditions)
{
    // For each entity type, create a candidate function that evaluates to true given
    // discriminator assignments iff. all of that type's conditions evaluate to true.
    var candidateFunctions = new Vertex[MappedEntityTypes.Count];
    for (var i = 0; i < candidateFunctions.Length; i++)
    {
        // Seed the candidate function conjunction with 'true'.
        var candidateFunction = Vertex.One;
        for (var j = 0; j < NormalizedEntityTypeMappings.Count; j++)
        {
            var entityTypeMapping = NormalizedEntityTypeMappings[j];
            // Determine if this mapping is a positive or negative case for the current type.
            if (entityTypeMapping.ImpliedEntityTypes[i])
            {
                candidateFunction = converter.Solver.And(candidateFunction, mappingConditions[j]);
            }
        }
        candidateFunctions[i] = candidateFunction;
    }
    // Make sure that for each type with satisfiable candidateFunction all assignments for the type resolve to only that type.
    var unambigouslyReachableMap = new BitArray(candidateFunctions.Length, true);
    for (var i = 0; i < candidateFunctions.Length; ++i)
    {
        if (candidateFunctions[i].IsZero())
        {
            // The i-th type is unreachable regardless of other types.
            unambigouslyReachableMap[i] = false;
        }
        else
        {
            // Pairwise overlap check; marking both i and j lets the j-loop start at i + 1.
            for (var j = i + 1; j < candidateFunctions.Length; ++j)
            {
                if (!converter.Solver.And(candidateFunctions[i], candidateFunctions[j]).IsZero())
                {
                    // The i-th and j-th types have common assignments, hence they aren't unambiguously reachable.
                    unambigouslyReachableMap[i] = false;
                    unambigouslyReachableMap[j] = false;
                }
            }
        }
    }
    // Project the surviving bit positions back to entity types.
    var reachableTypes = new Set<EntityType>();
    for (var i = 0; i < candidateFunctions.Length; ++i)
    {
        if (unambigouslyReachableMap[i])
        {
            reachableTypes.Add(MappedEntityTypes[i]);
        }
    }
    return reachableTypes;
}
/// <summary>
/// Determines which types are produced by this mapping.
/// A type is reachable when some discriminator assignment satisfies exactly its candidate
/// condition (its positive conditions hold, all other types' conditions do not).
/// </summary>
/// <param name="converter">Conversion context providing the boolean solver.</param>
/// <param name="mappingConditions">One solver vertex per normalized entity type mapping.</param>
/// <returns>The set of reachable mapped entity types.</returns>
private Set<EntityType> FindReachableTypes(
    DomainConstraintConversionContext<string, ValueCondition> converter,
    Vertex[] mappingConditions)
{
    // For each entity type, create a candidate function that evaluates to true given
    // discriminator assignments iff. all of that type's conditions evaluate to true
    // and its negative conditions evaluate to false.
    var candidateFunctions = new Vertex[MappedEntityTypes.Count];
    for (var i = 0; i < candidateFunctions.Length; i++)
    {
        // Seed the candidate function conjunction with 'true'.
        var candidateFunction = Vertex.One;
        for (var j = 0; j < NormalizedEntityTypeMappings.Count; j++)
        {
            var entityTypeMapping = NormalizedEntityTypeMappings[j];
            // Determine if this mapping is a positive or negative case for the current type.
            if (entityTypeMapping.ImpliedEntityTypes[i])
            {
                candidateFunction = converter.Solver.And(candidateFunction, mappingConditions[j]);
            }
            else
            {
                candidateFunction = converter.Solver.And(candidateFunction, converter.Solver.Not(mappingConditions[j]));
            }
        }
        candidateFunctions[i] = candidateFunction;
    }
    // Make sure that for each type there is an assignment that resolves to only that type.
    var reachableTypes = new Set<EntityType>();
    for (var i = 0; i < candidateFunctions.Length; i++)
    {
        // Create a function that evaluates to true iff. the current candidate function is true
        // and every other candidate function is false.
        var isExactlyThisTypeCondition = converter.Solver.And(
            candidateFunctions.Select(
                (typeCondition, ordinal) => ordinal == i
                                                ? typeCondition
                                                : converter.Solver.Not(typeCondition)));
        // If the above conjunction is satisfiable, it means some row configuration exists producing the type.
        if (!isExactlyThisTypeCondition.IsZero())
        {
            reachableTypes.Add(MappedEntityTypes[i]);
        }
    }
    return reachableTypes;
}
/// <summary>
/// Builds one domain variable per discriminator column. Each column's domain starts with
/// the "other" and "null" placeholders and then picks up every condition value any
/// normalized type mapping mentions for that column (except the special NotNull range).
/// </summary>
private DomainVariable<string, ValueCondition>[] ConstructDomainVariables()
{
    // Seed every column's domain with the "other" and "null" placeholders.
    var columnDomains = new Set<ValueCondition>[DiscriminatorColumns.Count];
    for (var column = 0; column < columnDomains.Length; column++)
    {
        var domain = new Set<ValueCondition>();
        domain.Add(ValueCondition.IsOther);
        domain.Add(ValueCondition.IsNull);
        columnDomains[column] = domain;
    }
    // Collect all domain values mentioned by the mappings.
    foreach (var typeMapping in NormalizedEntityTypeMappings)
    {
        for (var column = 0; column < DiscriminatorColumns.Count; column++)
        {
            var condition = typeMapping.ColumnConditions[column];
            // NotNull is a special range (everything but IsNull), so it is not a domain value.
            if (condition != null
                && !condition.ConditionValue.IsNotNullCondition)
            {
                columnDomains[column].Add(condition.ConditionValue);
            }
        }
    }
    // A domain variable is identified by the column name and takes all collected domain values.
    var variables = new DomainVariable<string, ValueCondition>[columnDomains.Length];
    for (var column = 0; column < variables.Length; column++)
    {
        variables[column] = new DomainVariable<string, ValueCondition>(
            DiscriminatorColumns[column], columnDomains[column].MakeReadOnly());
    }
    return variables;
}
/// <summary>
/// Builds the knowledge base describing how function-import result rows map to structural
/// types: the mapped entity types, discriminator columns, normalized per-type conditions,
/// and the column rename mappings (for both entity and complex return types).
/// </summary>
/// <param name="structuralTypeMappings">The structural type mappings declared for the function import.</param>
/// <param name="itemCollection">Item collection used to resolve mapped entity types.</param>
// NOTE(review): structuralTypeMappings is enumerated multiple times below (Count, OfType,
// First, Cast) — acceptable only if callers pass a materialized collection; confirm.
internal FunctionImportStructuralTypeMappingKB(
    IEnumerable<FunctionImportStructuralTypeMapping> structuralTypeMappings,
    ItemCollection itemCollection)
{
    DebugCheck.NotNull(structuralTypeMappings);
    DebugCheck.NotNull(itemCollection);

    m_itemCollection = itemCollection;

    // If no specific type mapping.
    if (structuralTypeMappings.Count() == 0)
    {
        // Initialize with defaults.
        ReturnTypeColumnsRenameMapping = new Dictionary<string, FunctionImportReturnTypeStructuralTypeColumnRenameMapping>();
        NormalizedEntityTypeMappings = new ReadOnlyCollection<FunctionImportNormalizedEntityTypeMapping>(
            new List<FunctionImportNormalizedEntityTypeMapping>());
        DiscriminatorColumns = new ReadOnlyCollection<string>(new List<string>());
        MappedEntityTypes = new ReadOnlyCollection<EntityType>(new List<EntityType>());
        return;
    }

    var entityTypeMappings = structuralTypeMappings.OfType<FunctionImportEntityTypeMapping>();

    // FunctionImportEntityTypeMapping
    if (null != entityTypeMappings
        && null != entityTypeMappings.FirstOrDefault())
    {
        var isOfTypeEntityTypeColumnsRenameMapping =
            new Dictionary<EntityType, Collection<FunctionImportReturnTypePropertyMapping>>();
        var entityTypeColumnsRenameMapping =
            new Dictionary<EntityType, Collection<FunctionImportReturnTypePropertyMapping>>();
        var normalizedEntityTypeMappings = new List<FunctionImportNormalizedEntityTypeMapping>();

        // Collect all mapped entity types.
        MappedEntityTypes = entityTypeMappings
            .SelectMany(mapping => mapping.GetMappedEntityTypes(m_itemCollection))
            .Distinct()
            .ToList()
            .AsReadOnly();

        // Collect all discriminator columns.
        DiscriminatorColumns = entityTypeMappings
            .SelectMany(mapping => mapping.GetDiscriminatorColumns())
            .Distinct()
            .ToList()
            .AsReadOnly();

        m_entityTypeLineInfos = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);
        m_isTypeOfLineInfos = new KeyToListMap<EntityType, LineInfo>(EqualityComparer<EntityType>.Default);

        foreach (var entityTypeMapping in entityTypeMappings)
        {
            // Remember LineInfos for error reporting.
            foreach (var entityType in entityTypeMapping.EntityTypes)
            {
                m_entityTypeLineInfos.Add(entityType, entityTypeMapping.LineInfo);
            }
            foreach (var isTypeOf in entityTypeMapping.IsOfTypeEntityTypes)
            {
                m_isTypeOfLineInfos.Add(isTypeOf, entityTypeMapping.LineInfo);
            }

            // Create map from column name to condition.
            var columnMap = entityTypeMapping.Conditions.ToDictionary(
                condition => condition.ColumnName,
                condition => condition);

            // Align conditions with discriminator columns.
            var columnMappings = new List<FunctionImportEntityTypeMappingCondition>(DiscriminatorColumns.Count);
            for (var i = 0; i < DiscriminatorColumns.Count; i++)
            {
                var discriminatorColumn = DiscriminatorColumns[i];
                FunctionImportEntityTypeMappingCondition mappingCondition;
                if (columnMap.TryGetValue(discriminatorColumn, out mappingCondition))
                {
                    columnMappings.Add(mappingCondition);
                }
                else
                {
                    // Null indicates the value for this discriminator doesn't matter.
                    columnMappings.Add(null);
                }
            }

            // Create bit map for implied entity types.
            var impliedEntityTypesBitMap = new bool[MappedEntityTypes.Count];
            var impliedEntityTypesSet = new Set<EntityType>(entityTypeMapping.GetMappedEntityTypes(m_itemCollection));
            for (var i = 0; i < MappedEntityTypes.Count; i++)
            {
                impliedEntityTypesBitMap[i] = impliedEntityTypesSet.Contains(MappedEntityTypes[i]);
            }

            // Construct normalized mapping.
            normalizedEntityTypeMappings.Add(
                new FunctionImportNormalizedEntityTypeMapping(this, columnMappings, new BitArray(impliedEntityTypesBitMap)));

            // Construct the rename mappings by adding isTypeOf types and specific entity types to the corresponding lists.
            foreach (var isOfType in entityTypeMapping.IsOfTypeEntityTypes)
            {
                if (!isOfTypeEntityTypeColumnsRenameMapping.Keys.Contains(isOfType))
                {
                    isOfTypeEntityTypeColumnsRenameMapping.Add(
                        isOfType, new Collection<FunctionImportReturnTypePropertyMapping>());
                }
                foreach (var rename in entityTypeMapping.ColumnsRenameList)
                {
                    isOfTypeEntityTypeColumnsRenameMapping[isOfType].Add(rename);
                }
            }
            foreach (var entityType in entityTypeMapping.EntityTypes)
            {
                if (!entityTypeColumnsRenameMapping.Keys.Contains(entityType))
                {
                    entityTypeColumnsRenameMapping.Add(entityType, new Collection<FunctionImportReturnTypePropertyMapping>());
                }
                foreach (var rename in entityTypeMapping.ColumnsRenameList)
                {
                    entityTypeColumnsRenameMapping[entityType].Add(rename);
                }
            }
        }

        // Flatten the per-type rename collections into the final rename mapping.
        ReturnTypeColumnsRenameMapping =
            new FunctionImportReturnTypeEntityTypeColumnsRenameBuilder(
                isOfTypeEntityTypeColumnsRenameMapping,
                entityTypeColumnsRenameMapping)
                .ColumnRenameMapping;

        NormalizedEntityTypeMappings = new ReadOnlyCollection<FunctionImportNormalizedEntityTypeMapping>(
            normalizedEntityTypeMappings);
    }
    else
    {
        // FunctionImportComplexTypeMapping
        Debug.Assert(
            structuralTypeMappings.First() is FunctionImportComplexTypeMapping,
            "only two types can have renames, complexType and entityType");
        var complexTypeMappings = structuralTypeMappings.Cast<FunctionImportComplexTypeMapping>();

        Debug.Assert(
            complexTypeMappings.Count() == 1, "how come there are more than 1, complex type cannot derive from other complex type");

        ReturnTypeColumnsRenameMapping = new Dictionary<string, FunctionImportReturnTypeStructuralTypeColumnRenameMapping>();
        foreach (var rename in complexTypeMappings.First().ColumnsRenameList)
        {
            var columnRenameMapping = new FunctionImportReturnTypeStructuralTypeColumnRenameMapping(rename.CMember);
            columnRenameMapping.AddRename(
                new FunctionImportReturnTypeStructuralTypeColumn(
                    rename.SColumn,
                    complexTypeMappings.First().ReturnType,
                    false,
                    rename.LineInfo));
            ReturnTypeColumnsRenameMapping.Add(rename.CMember, columnRenameMapping);
        }

        // Initialize the entity mapping data as empty.
        NormalizedEntityTypeMappings = new ReadOnlyCollection<FunctionImportNormalizedEntityTypeMapping>(
            new List<FunctionImportNormalizedEntityTypeMapping>());
        DiscriminatorColumns = new ReadOnlyCollection<string>(
            new List<string> { });
        MappedEntityTypes = new ReadOnlyCollection<EntityType>(
            new List<EntityType> { });
    }
}
/// <summary>
/// Removes each selected solution from its parent workspace (after user confirmation and a
/// successful unload request), then saves every workspace that was modified.
/// </summary>
public override void DeleteMultipleItems ()
{
	// Workspaces whose item lists were modified and therefore need saving.
	Set<IWorkspaceFileObject> items = new Set<IWorkspaceFileObject> ();
	foreach (ITreeNavigator node in CurrentNodes) {
		Solution solution = node.DataItem as Solution;
		Workspace parent = node.GetParentDataItem (typeof(Workspace), false) as Workspace;
		// NOTE(review): a node with no Workspace parent aborts the whole loop here without
		// saving workspaces already modified in earlier iterations — confirm this is intended.
		if (parent == null) return;
		if (MessageService.Confirm (GettextCatalog.GetString ("Do you really want to remove solution {0} from workspace {1}?", solution.Name, parent.Name), AlertButton.Remove)) {
			// Only detach and dispose once the IDE agrees the solution can be unloaded.
			if (IdeApp.Workspace.RequestItemUnload (solution)) {
				parent.Items.Remove (solution);
				solution.Dispose ();
				items.Add (parent);
			}
		}
	}
	IdeApp.ProjectOperations.Save (items);
}
/// <summary>
/// Imports Philemon (replacing the existing book), undoes the import, and verifies that
/// the original book is restored and no extra archived drafts remain.
/// </summary>
public void UndoImport_ReplaceBook()
{
	FdoCache cache = m_firstMainWnd.Cache;
	Scripture scr = (Scripture)m_firstMainWnd.ScriptureObj;
	// Snapshot the archived drafts so new ones created by the import can be identified.
	Set<int> origDrafts = new Set<int>(scr.ArchivedDraftsOC.HvoArray);
	// Create a settings object and set it to be a Paratext import of Philemon.
	ScrImportSet settings = new ScrImportSet();
	scr.ImportSettingsOC.Add(settings);
	settings.ImportTypeEnum = TypeOfImport.Paratext6;
	settings.ParatextScrProj = "TEV";
	settings.SetMapping(MappingSet.Main, new ImportMappingInfo(@"\it", @"\it*", false,
		MappingTargetType.TEStyle, MarkerDomain.Default, "Emphasis", null));
	cache.Save();
	// Setup the reference to import (Philemon 1:1).
	BCVRef scrRef = new BCVRef(57001001);
	// Do the import.
	settings.ImportTranslation = true;
	settings.ImportBookIntros = true;
	settings.StartRef = scrRef;
	settings.EndRef = scrRef;
	m_firstMainWnd.Import(settings);
	// Expect two new versions: the imported version plus a backup of the replaced book.
	IScrDraft importedDraft = GetImportedVersion(cache, origDrafts, 2);
	IScrBook importedPhm = importedDraft.FindBook(57);
	Assert.IsNotNull(importedPhm);
	Assert.IsTrue(cache.ActionHandlerAccessor.CanUndo());
	Assert.AreEqual(UndoResult.kuresRefresh, cache.ActionHandlerAccessor.Undo());
	// After undo the book in Scripture must be the restored original, not the imported one.
	IScrBook restoredPhm = scr.FindBook(57);
	Assert.IsNotNull(restoredPhm);
	Assert.AreNotEqual(restoredPhm.Hvo, importedPhm.Hvo);
	// Undo should leave the archived-draft count back at its original value.
	Set<int> finalDrafts = new Set<int>(scr.ArchivedDraftsOC.HvoArray);
	Assert.AreEqual(origDrafts.Count, finalDrafts.Count);
}
/// <summary>
/// Imports Titus (a book not previously present), undoes the import, and verifies that the
/// book is gone from Scripture and no extra archived drafts remain.
/// </summary>
public void UndoImport_NewBook()
{
	FdoCache cache = m_firstMainWnd.Cache;
	Scripture scr = (Scripture)m_firstMainWnd.ScriptureObj;
	// Snapshot the archived drafts so new ones created by the import can be identified.
	Set<int> origDrafts = new Set<int>(scr.ArchivedDraftsOC.HvoArray);
	// Create a settings object and set it to be a Paratext import of Titus.
	ScrImportSet settings = new ScrImportSet();
	scr.ImportSettingsOC.Add(settings);
	settings.ImportTypeEnum = TypeOfImport.Paratext6;
	settings.ParatextScrProj = "TEV";
	settings.SetMapping(MappingSet.Main, new ImportMappingInfo(@"\it", @"\it*", false,
		MappingTargetType.TEStyle, MarkerDomain.Default, "Emphasis", null));
	cache.Save();
	// Setup the reference to import (Titus 1:1).
	BCVRef scrRef = new BCVRef(56001001);
	// Do the import.
	settings.ImportTranslation = true;
	settings.ImportBookIntros = true;
	settings.StartRef = scrRef;
	settings.EndRef = scrRef;
	m_firstMainWnd.Import(settings);
	// A new book means only one new version is expected (no backup of a replaced book).
	IScrDraft importedDrafts = GetImportedVersion(cache, origDrafts, 1);
	Assert.IsNotNull(importedDrafts.FindBook(56));
	Assert.IsTrue(cache.ActionHandlerAccessor.CanUndo());
	Assert.AreEqual(UndoResult.kuresRefresh, cache.ActionHandlerAccessor.Undo());
	// The newly imported book must be gone from Scripture after undo.
	Assert.IsNull(scr.FindBook(56));
	// JohnT: no longer happens, and I can't think why it should, since Undo does not
	// change Scripture.
	Set<int> finalDrafts = new Set<int>(scr.ArchivedDraftsOC.HvoArray);
	Assert.AreEqual(origDrafts.Count, finalDrafts.Count);
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Gets the imported draft: computes which archived drafts are new relative to
/// <paramref name="origDrafts"/>, asserts the expected count, and returns the one whose
/// type is ImportedVersion.
/// </summary>
/// <param name="cache">The cache.</param>
/// <param name="origDrafts">The HVOs of the drafts that existed before the import.</param>
/// <param name="cExpectedNewVersions">The number of expected new versions (must be 1 or
/// 2: the imported version and possibly a backup saved version)
/// </param>
/// <returns>The new draft whose Type is ImportedVersion.</returns>
/// ------------------------------------------------------------------------------------
private IScrDraft GetImportedVersion(FdoCache cache, Set<int> origDrafts, int cExpectedNewVersions)
{
	Debug.Assert(cExpectedNewVersions >= 1 && cExpectedNewVersions <= 2);
	Set<int> curDrafts = new Set<int>(cache.LangProject.TranslatedScriptureOA.ArchivedDraftsOC.HvoArray);
	// The new drafts are those present now but not in the original snapshot.
	Set<int> newDrafts = curDrafts.Difference(origDrafts);
	Assert.AreEqual(cExpectedNewVersions, newDrafts.Count);
	IScrDraft result = ScrDraft.CreateFromDBObject(cache, new List<int>(newDrafts)[0]);
	if (result.Type == ScrDraftType.ImportedVersion)
		return result;
	// NOTE(review): if cExpectedNewVersions is 1 and the single new draft is not the
	// imported version, this index will throw — presumably an acceptable hard failure
	// in a test helper, but confirm.
	return ScrDraft.CreateFromDBObject(cache, new List<int>(newDrafts)[1]);
}
/// <summary>
/// Simulates the bulk edit: for each item that can be changed, caches the would-be string
/// value under <paramref name="tagFakeFlid"/> and caches an enabled flag under
/// <paramref name="tagEnable"/>, reporting progress along the way.
/// </summary>
/// <param name="itemsToChange">HVOs of the items to process.</param>
/// <param name="tagFakeFlid">Fake flid under which the preview string is cached.</param>
/// <param name="tagEnable">Tag under which the enabled flag (1/0) is cached.</param>
/// <param name="state">Progress indicator updated while working.</param>
public void FakeDoit(Set<int> itemsToChange, int tagFakeFlid, int tagEnable, ProgressState state)
{
    CheckDisposed();
    IVwCacheDa cda = m_cache.VwCacheDaAccessor;
    // FIX: removed unused local 'sda' (m_cache.MainCacheAccessor was fetched but never read).
    ITsString tss = m_cache.MakeAnalysisTss(m_selectedLabel);
    int i = 0;
    // Report progress 50 times or every 100 items, whichever is more (but no more than once per item!)
    int interval = Math.Min(100, Math.Max(itemsToChange.Count / 50, 1));
    foreach (int hvo in itemsToChange)
    {
        i++;
        if (i % interval == 0)
        {
            state.PercentDone = i * 100 / itemsToChange.Count;
            state.Breath();
        }
        bool fEnable = CanFakeIt(hvo);
        if (fEnable)
            cda.CacheStringProp(hvo, tagFakeFlid, tss);
        // Cache 1/0 so the UI can enable or disable the row.
        cda.CacheIntProp(hvo, tagEnable, (fEnable ? 1 : 0));
    }
}
/// <summary>
/// Bulk-sets the part of speech on the selected senses. Phase 1 (first 20% of progress):
/// group the changeable senses by owning entry. Phase 2 (remaining 80%): for each entry,
/// reuse an existing stem MSA with the chosen POS, or repurpose an MSA used only by the
/// senses being changed, or create a new one — then point all the senses at it. All work
/// happens inside a single undo task.
/// </summary>
/// <param name="itemsToChange">HVOs of the senses to change.</param>
/// <param name="state">Progress indicator to update while working.</param>
public override void DoIt(Set<int> itemsToChange, ProgressState state)
{
	CheckDisposed();
	ISilDataAccess sda = m_cache.MainCacheAccessor;
	// Make a hashtable from HVO of entry to list of modified senses.
	Dictionary<int, List<int>> sensesByEntry = new Dictionary<int, List<int>>();
	int tagOwningEntry = m_cache.VwCacheDaAccessor.GetVirtualHandlerName("LexSense", "OwningEntry").Tag;
	int i = 0;
	// Report progress 50 times or every 100 items, whichever is more (but no more than once per item!)
	int interval = Math.Min(100, Math.Max(itemsToChange.Count / 50, 1));
	foreach (int hvoSense in itemsToChange)
	{
		i++;
		if (i % interval == 0)
		{
			// Grouping phase accounts for the first 20% of the progress bar.
			state.PercentDone = i * 20 / itemsToChange.Count;
			state.Breath();
		}
		int hvoMsa = sda.get_ObjectProp(hvoSense, (int)LexSense.LexSenseTags.kflidMorphoSyntaxAnalysis);
		if (hvoMsa != 0 && m_cache.GetClassOfObject(hvoMsa) != MoStemMsa.kclsidMoStemMsa)
			continue; // can't fix this one, not a stem.
		int hvoEntry = sda.get_ObjectProp(hvoSense, tagOwningEntry);
		List<int> senses = null;
		if (!sensesByEntry.TryGetValue(hvoEntry, out senses))
		{
			senses = new List<int>();
			sensesByEntry[hvoEntry] = senses;
		}
		senses.Add(hvoSense);
	}
	m_cache.BeginUndoTask(FdoUiStrings.ksUndoBulkEditPOS, FdoUiStrings.ksRedoBulkEditPOS);
	BulkEditBar.ForceRefreshOnUndoRedo(sda);
	i = 0;
	interval = Math.Min(100, Math.Max(sensesByEntry.Count / 50, 1));
	foreach (KeyValuePair<int, List<int>> kvp in sensesByEntry)
	{
		i++;
		if (i % interval == 0)
		{
			// Apply phase covers the remaining 80% of the progress bar.
			state.PercentDone = i * 80 / sensesByEntry.Count + 20;
			state.Breath();
		}
		int hvoEntry = kvp.Key;
		List<int> sensesToChange = kvp.Value;
		int hvoMsmTarget = 0;
		int cmsa = sda.get_VecSize(hvoEntry, (int)LexEntry.LexEntryTags.kflidMorphoSyntaxAnalyses);
		bool fAssumeSurvives = true; // true if we know all old MSAs will survive.
		// First preference: an existing stem MSA on the entry that already has the desired POS.
		for (int imsa = 0; imsa < cmsa; imsa++)
		{
			int hvoMsa = sda.get_VecItem(hvoEntry, (int)LexEntry.LexEntryTags.kflidMorphoSyntaxAnalyses, imsa);
			if (m_cache.GetClassOfObject(hvoMsa) == MoStemMsa.kclsidMoStemMsa
				&& sda.get_ObjectProp(hvoMsa, (int)MoStemMsa.MoStemMsaTags.kflidPartOfSpeech) == m_selectedHvo)
			{
				// Can reuse this one!
				hvoMsmTarget = hvoMsa;
				fAssumeSurvives = false; // old MSA may be redundant.
				break;
			}
		}
		if (hvoMsmTarget == 0)
		{
			// See if we can reuse an existing MoStemMsa by changing it.
			// This is possible if it is used only by senses in the list, or not used at all.
			List<int> otherSenses = new List<int>();
			AddExcludedSenses(sda, hvoEntry, (int)LexEntry.LexEntryTags.kflidSenses, otherSenses, sensesToChange);
			for (int imsa = 0; imsa < cmsa; imsa++)
			{
				int hvoMsa = sda.get_VecItem(hvoEntry, (int)LexEntry.LexEntryTags.kflidMorphoSyntaxAnalyses, imsa);
				if (m_cache.GetClassOfObject(hvoMsa) != MoStemMsa.kclsidMoStemMsa)
					continue;
				bool fOk = true;
				foreach (int hvoOtherSense in otherSenses)
				{
					if (sda.get_ObjectProp(hvoOtherSense, (int)LexSense.LexSenseTags.kflidMorphoSyntaxAnalysis) == hvoMsa)
					{
						fOk = false; // we can't change it, one of the unchanged senses uses it
						break;
					}
				}
				if (fOk)
				{
					// Can reuse this one! Nothing we don't want to change uses it. Go ahead and set it to the
					// required POS.
					hvoMsmTarget = hvoMsa;
					int hvoOld = sda.get_ObjectProp(hvoMsmTarget, (int) MoStemMsa.MoStemMsaTags.kflidPartOfSpeech);
					sda.SetObjProp(hvoMsmTarget, (int) MoStemMsa.MoStemMsaTags.kflidPartOfSpeech, m_selectedHvo);
					sda.PropChanged(null, (int)PropChangeType.kpctNotifyAll, hvoMsmTarget,
						(int)MoStemMsa.MoStemMsaTags.kflidPartOfSpeech, 0, 1, hvoOld == 0 ? 1 : 0);
					// compare MoStemMsa.ResetInflectionClass: changing POS requires us to clear inflection class,
					// if it is set.
					if (hvoOld != 0 && sda.get_ObjectProp(hvoMsmTarget, (int)MoStemMsa.MoStemMsaTags.kflidInflectionClass) != 0)
					{
						sda.SetObjProp(hvoMsmTarget, (int)MoStemMsa.MoStemMsaTags.kflidInflectionClass, 0);
						sda.PropChanged(null, (int)PropChangeType.kpctNotifyAll, hvoMsmTarget,
							(int)MoStemMsa.MoStemMsaTags.kflidInflectionClass, 0, 0, 1);
					}
					break;
				}
			}
		}
		if (hvoMsmTarget == 0)
		{
			// Nothing we can reuse...make a new one.
			hvoMsmTarget = sda.MakeNewObject((int)MoStemMsa.kclsidMoStemMsa, hvoEntry,
				(int)LexEntry.LexEntryTags.kflidMorphoSyntaxAnalyses, -1);
			sda.SetObjProp(hvoMsmTarget, (int)MoStemMsa.MoStemMsaTags.kflidPartOfSpeech, m_selectedHvo);
			// NOTE(review): this notification passes the inflection-class tag with an index into
			// the entry's MorphoSyntaxAnalyses vector — looks like it was meant to announce the
			// insertion into kflidMorphoSyntaxAnalyses; confirm the intended tag.
			sda.PropChanged(null, (int)PropChangeType.kpctNotifyAll, hvoMsmTarget,
				(int)MoStemMsa.MoStemMsaTags.kflidInflectionClass,
				m_cache.GetObjIndex(hvoEntry, (int)LexEntry.LexEntryTags.kflidMorphoSyntaxAnalyses, hvoMsmTarget), 1, 0);
		}
		// Finally! Make the senses we want to change use it.
		foreach (int hvoSense in sensesToChange)
		{
			int hvoOld = sda.get_ObjectProp(hvoSense, (int)LexSense.LexSenseTags.kflidMorphoSyntaxAnalysis);
			if (hvoOld == hvoMsmTarget)
				continue; // reusing a modified msa.
			LexSense.HandleOldMSA(m_cache, hvoSense, hvoMsmTarget, fAssumeSurvives);
			sda.SetObjProp(hvoSense, (int)LexSense.LexSenseTags.kflidMorphoSyntaxAnalysis, hvoMsmTarget);
			sda.PropChanged(null, (int)PropChangeType.kpctNotifyAll, hvoSense,
				(int)LexSense.LexSenseTags.kflidMorphoSyntaxAnalysis, 0, 1, hvoOld == 0 ? 1 : 0);
		}
	}
	m_cache.EndUndoTask();
}
/// <summary>
/// Constructs an immutable (read-only) <c>Set</c> wrapper around the given set.
/// </summary>
/// <param name="basisSet">The <c>Set</c> that is wrapped.</param>
public ImmutableSet(Set basisSet)
{
    this._basisSet = basisSet;
}