private void InitialLevels()
{
    var verticesLeft = new HashSet<TVertex>(VisitedGraph.Vertices);

    // Initial 0th level: vertices with no parent in the compound graph
    Levels.Add(new HashSet<TVertex>(VisitedGraph.Vertices.Where(v => _compoundGraph.GetParent(v) == default(TVertex))));
    verticesLeft.RemoveAll(Levels[0]);

    // Other levels
    for (int i = 1; verticesLeft.Count > 0; ++i)
    {
        var nextLevel = new HashSet<TVertex>();
        foreach (TVertex parent in Levels[i - 1])
        {
            if (_compoundGraph.GetChildrenCount(parent) <= 0)
            {
                continue;
            }

            foreach (TVertex child in _compoundGraph.GetChildrenVertices(parent))
            {
                nextLevel.Add(child);
            }
        }

        Levels.Add(nextLevel);
        verticesLeft.RemoveAll(nextLevel);
    }
}
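// Note: the BCL HashSet<T>/ISet<T> types do not expose a RemoveAll method (the built-in equivalent
// is ExceptWith); the snippets collected here rely on RemoveAll extensions defined in their own
// codebases. Below is a minimal sketch of the most common shape, removing every element of a given
// sequence. The class name and the params overload are illustrative assumptions, and a few snippets
// further down use other overloads (for example a predicate-based one) that are not shown here.
using System.Collections.Generic;

internal static class SetExtensions
{
    // Removes each item of the sequence from the target collection.
    public static void RemoveAll<T>(this ICollection<T> collection, IEnumerable<T> items)
    {
        foreach (T item in items)
        {
            collection.Remove(item);
        }
    }

    // Convenience params overload, e.g. set.RemoveAll("E", "F").
    public static void RemoveAll<T>(this ICollection<T> collection, params T[] items)
    {
        RemoveAll(collection, (IEnumerable<T>)items);
    }
}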
public void RemoveAll()
{
    HashSet<int> list2 = new HashSet<int>();
    hashset.Add(4);
    hashset.Add(5);
    hashset.Add(6);
    list2.Add(5);
    list2.Add(7);
    list2.Add(4);
    hashset.RemoveAll(list2);
    Assert.IsTrue(IC.eq(hashset, 6));
    hashset.Add(5);
    hashset.Add(4);
    list2.Clear();
    list2.Add(6);
    list2.Add(5);
    hashset.RemoveAll(list2);
    Assert.IsTrue(IC.eq(hashset, 4));
    list2.Clear();
    list2.Add(7);
    list2.Add(8);
    list2.Add(9);
    hashset.RemoveAll(list2);
    Assert.IsTrue(IC.eq(hashset, 4));
}
public void ShouldExtendSetByRemoveAllMethod()
{
    // given
    ISet<string> set = new HashSet<string> { "A", "B", "C", "D", "E", "F" };

    // when
    set.RemoveAll(Lists.AsList("A", "B"));
    set.RemoveAll("E", "F");

    // then
    Check.That(set).HasSize(2);
    Check.That(set).Contains("C", "D");
}
private SyntaxNode AddNamespaceImports(
    Document document,
    SemanticModel model,
    OptionSet options,
    IEnumerable<INamespaceSymbol> namespaces)
{
    var existingNamespaces = new HashSet<INamespaceSymbol>();
    this.GetExistingImportedNamespaces(document, model, existingNamespaces);

    var namespacesToAdd = new HashSet<INamespaceSymbol>(namespaces);
    namespacesToAdd.RemoveAll(existingNamespaces);

    var root = model.SyntaxTree.GetRoot();
    if (namespacesToAdd.Count == 0)
    {
        return root;
    }

    var gen = SyntaxGenerator.GetGenerator(document);
    var newRoot = root;
    foreach (var import in namespacesToAdd.Select(ns => gen.NamespaceImportDeclaration(ns.ToDisplayString()).WithAdditionalAnnotations(Simplifier.Annotation)))
    {
        newRoot = this.InsertNamespaceImport(newRoot, gen, import, options);
    }

    return newRoot;
}
private async Task <SyntaxNode> AddNamespaceImportsAsync( Document document, SemanticModel model, OptionSet options, IEnumerable <INamespaceSymbol> namespaces, CancellationToken cancellationToken) { var existingNamespaces = new HashSet <INamespaceSymbol>(); await this.GetExistingImportedNamespacesAsync(document, model, existingNamespaces, cancellationToken).ConfigureAwait(false); var namespacesToAdd = new HashSet <INamespaceSymbol>(namespaces, NamespaceEqualityComparer.Singleton); namespacesToAdd.RemoveAll(existingNamespaces); var root = await model.SyntaxTree.GetRootAsync(cancellationToken).ConfigureAwait(false); if (namespacesToAdd.Count == 0) { return(root); } var gen = SyntaxGenerator.GetGenerator(document); var newRoot = root; foreach (var import in namespacesToAdd.Select(ns => gen.NamespaceImportDeclaration(ns.ToDisplayString()).WithAdditionalAnnotations(Simplifier.Annotation))) { newRoot = this.InsertNamespaceImport(newRoot, gen, import, options); } return(newRoot); }
public static ISet<T> Minus<T>(this ISet<T> set, IEnumerable<T> values)
{
    var result = new HashSet<T>(set);
    result.RemoveAll(values);
    return result;
}
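// Hedged usage sketch for the Minus extension above; the concrete values are illustrative.
// Minus copies the set before removing, so the source set is left unmodified:
ISet<int> source = new HashSet<int> { 1, 2, 3, 4 };
ISet<int> remaining = source.Minus(new[] { 2, 4 }); // remaining == { 1, 3 }; source still has 4 elements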
/// <summary>
/// If all the <paramref name="childDeclarators"/> are contained in <paramref name="declarators"/>,
/// then removes the <paramref name="childDeclarators"/> from <paramref name="declarators"/> and
/// adds the <paramref name="parentDeclaration"/> to the <paramref name="declarators"/>.
/// </summary>
protected static void AdjustAndAddAppropriateDeclaratorsToRemove(SyntaxNode parentDeclaration, IEnumerable<SyntaxNode> childDeclarators, HashSet<SyntaxNode> declarators)
{
    if (declarators.Contains(parentDeclaration))
    {
        Debug.Assert(childDeclarators.All(c => !declarators.Contains(c)));
        return;
    }

    var declaratorsContainsAllChildren = true;
    foreach (var childDeclarator in childDeclarators)
    {
        if (!declarators.Contains(childDeclarator))
        {
            declaratorsContainsAllChildren = false;
            break;
        }
    }

    if (declaratorsContainsAllChildren)
    {
        // Remove the entire parent declaration instead of individual child declarators within it.
        declarators.Add(parentDeclaration);
        declarators.RemoveAll(childDeclarators);
    }
}
private void Ready()
{
    if (is_ready == false)
    {
        HashSet<SpectrumBand<T>> current_bands = new HashSet<SpectrumBand<T>>();

        lower_bound = float.PositiveInfinity;
        upper_bound = float.NegativeInfinity;

        foreach (SpectrumBound<T> bound in bounds.Values)
        {
            if (bound.GetValue() < lower_bound)
            {
                lower_bound = bound.GetValue();
            }
            if (bound.GetValue() > upper_bound)
            {
                upper_bound = bound.GetValue();
            }

            current_bands.AddRange(bound.GetStartingBands());
            current_bands.RemoveAll(bound.GetEndingBands());
            bound.SetActiveBands(current_bands);
        }

        is_ready = true;
    }
}
private async Task<SyntaxNode> AddNamespaceImportsAsync( Document document, SemanticModel model, OptionSet options, IEnumerable<INamespaceSymbol> namespaces, CancellationToken cancellationToken) { var existingNamespaces = new HashSet<INamespaceSymbol>(); await this.GetExistingImportedNamespacesAsync(document, model, existingNamespaces, cancellationToken).ConfigureAwait(false); var namespacesToAdd = new HashSet<INamespaceSymbol>(namespaces); namespacesToAdd.RemoveAll(existingNamespaces); var root = await model.SyntaxTree.GetRootAsync(cancellationToken).ConfigureAwait(false); if (namespacesToAdd.Count == 0) { return root; } var gen = SyntaxGenerator.GetGenerator(document); var newRoot = root; foreach (var import in namespacesToAdd.Select(ns => gen.NamespaceImportDeclaration(ns.ToDisplayString()).WithAdditionalAnnotations(Simplifier.Annotation))) { newRoot = this.InsertNamespaceImport(newRoot, gen, import, options); } return newRoot; }
private static bool FindSimpleStatements(Statement stat, Dictionary <int, HashSet < int> > mapExtPost) { bool found; bool success = false; do { found = false; List <Statement> lstStats = stat.GetPostReversePostOrderList(); foreach (Statement st in lstStats) { Statement result = DetectStatement(st); if (result != null) { if (stat.type == Statement.Type_General && result.GetFirst() == stat.GetFirst() && stat.GetStats().Count == result.GetStats().Count) { // mark general statement stat.type = Statement.Type_Placeholder; } stat.CollapseNodesToStatement(result); // update the postdominator map if (!(mapExtPost.Count == 0)) { HashSet <int> setOldNodes = new HashSet <int>(); foreach (Statement old in result.GetStats()) { setOldNodes.Add(old.id); } int newid = result.id; foreach (int key in new List <int>(mapExtPost.Keys)) { HashSet <int> set = mapExtPost.GetOrNull(key); int oldsize = set.Count; set.RemoveAll(setOldNodes); if (setOldNodes.Contains(key)) { Sharpen.Collections.AddAll(mapExtPost.ComputeIfAbsent(newid, (int k) => new HashSet <int>()), set); Sharpen.Collections.Remove(mapExtPost, key); } else if (set.Count < oldsize) { set.Add(newid); } } } found = true; break; } } if (found) { success = true; } }while (found); return(success); }
/// <summary>
/// Returns a new set with the specified items removed (set difference); the source set is not modified.
/// </summary>
/// <typeparam name="T">Type of collection item</typeparam>
/// <param name="source">this</param>
/// <param name="items">items to remove</param>
/// <returns>new collection without the removed items</returns>
public static ISet<T> Minus<T>(this ISet<T> source, IEnumerable<T> items)
{
    Preconditions.IsNotNull(source, () => new ArgumentNullException("source"));

    var result = new HashSet<T>(source);
    result.RemoveAll(items);
    return result;
}
/// <summary>Perform set difference operation on keyMap2 from keyMap1.</summary>
/// <param name="keyMap1">The initial set</param>
/// <param name="keyMap2">The set to subtract</param>
/// <returns>Returns set operation keyMap1-keyMap2</returns>
private static ICollection<string> CompareConfigurationToXmlFields(IDictionary<string, string> keyMap1, IDictionary<string, string> keyMap2)
{
    ICollection<string> retVal = new HashSet<string>(keyMap1.Keys);
    retVal.RemoveAll(keyMap2.Keys);
    return retVal;
}
/// <summary>
/// Extracts the nested prefab dependencies and non-prefab assets.
/// </summary>
/// <returns>The nested dependencies.</returns>
public static HashSet<string> ExtractNestedDependencies(string path)
{
    var dep = new HashSet<string>(AssetDatabase.GetDependencies(path, true));
    var nested = GetOutermostNestedPrefabs(path).ToArray();
    dep.RemoveAll(AssetDatabase.GetDependencies(nested, true));
    dep.AddAll(nested);
    return dep;
}
private static HashSet<Project> GetViableUnreferencedProjects(Project project)
{
    var solution = project.Solution;
    var viableProjects = new HashSet<Project>(solution.Projects);

    // Clearly we can't reference ourselves.
    viableProjects.Remove(project);

    // We can't reference any project that transitively depends on us. Doing so would
    // cause a circular reference between projects.
    var dependencyGraph = solution.GetProjectDependencyGraph();
    var projectsThatTransitivelyDependOnThisProject = dependencyGraph.GetProjectsThatTransitivelyDependOnThisProject(project.Id);
    viableProjects.RemoveAll(projectsThatTransitivelyDependOnThisProject.Select(id => solution.GetProject(id)));

    // We also aren't interested in any projects we're already directly referencing.
    viableProjects.RemoveAll(project.ProjectReferences.Select(r => solution.GetProject(r.ProjectId)));

    return viableProjects;
}
public static HashSet<LabelSymbol> Find(BoundNode node, Dictionary<BoundNode, HashSet<LabelSymbol>> unmatchedLabelsCache)
{
    UnmatchedGotoFinder finder = new UnmatchedGotoFinder(unmatchedLabelsCache);
    finder.Visit(node);

    HashSet<LabelSymbol> gotos = finder.gotos;
    HashSet<LabelSymbol> targets = finder.targets;
    if (gotos != null && targets != null)
    {
        gotos.RemoveAll(targets);
    }

    return gotos;
}
public static bool IsExitComponent(List<Statement> lst)
{
    HashSet<Statement> set = new HashSet<Statement>();
    foreach (var stat in lst)
    {
        Sharpen.Collections.AddAll(set, stat.GetNeighbours(StatEdge.Type_Regular, Statement.Direction_Forward));
    }
    set.RemoveAll(lst);

    return set.Count == 0;
}
protected internal virtual void MakeInitialBlocks()
{
    // sink block (in case the automaton isn't complete)
    MakeBlock(Java.Util.Collections.Singleton(SinkNode));

    // accepting block
    ISet endNodes = GetUnminimizedFA().GetEndNodes();
    MakeBlock(endNodes);

    // main block
    ICollection nonFinalNodes = new HashSet(GetUnminimizedFA().GetNodes());
    nonFinalNodes.RemoveAll(endNodes);
    MakeBlock(nonFinalNodes);
}
/// <summary> /// Enumerate all derived mods from a list of mods /// </summary> /// <param name="ModList"></param> private void EnumerateMods() { DirectoryInfo location = new DirectoryInfo(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)); HashSet <string> suppress = AtomicFile.ReadStream(Path.Combine(location.FullName, "loader.suppress"), false).ReadAllText().Split('\n', '\r', ' ', '\t') .ToSet(); suppress.RemoveAll((string x) => x.Trim().Length == 0); List <Type> ModList = new List <Type>(); foreach (FileInfo AssemblyDll in location.GetFiles("*.dll").Concat(location.GetFiles("*.exe"))) { HashSet <string> source = suppress; Func <string, bool> predicate = (string suppression) => AssemblyDll.FullName.IndexOf(suppression, StringComparison.OrdinalIgnoreCase) != -1; if (!source.Any(predicate) && File.Exists(Path.ChangeExtension(AssemblyDll.FullName, ".mod"))) { try { foreach (Type AssemblyType in Assembly.LoadFrom(AssemblyDll.FullName).GetTypes()) { if (!AssemblyType.IsAbstract && AssemblyType.IsClass && typeof(SunbeamMod).IsAssignableFrom(AssemblyType)) { ModList.Add(AssemblyType); } } } catch (ReflectionTypeLoadException e) {} } } Logger.WriteLine("SunbeamController.EnumerateMods: Found " + ModList.Count + " mods."); foreach (Type ModType in ModList) { try { SunbeamMod Mod = (SunbeamMod)Activator.CreateInstance(ModType); Mod.ApplyHarmonyPatches(); Mod.Initialize(); Logger.WriteLine("SunbeamController.EnumerateMods: Loaded mod '" + Mod.ModIdentifier + "'"); this.Mods.Add(Mod); } catch (Exception e) { Logger.WriteLine("SunbeamController.EnumerateMods: Exception thrown - " + e.ToString()); } } }
private static bool GetUsedVersions(SSAUConstructorSparseEx ssa, VarVersionPair var, List <VarVersionNode> res) { VarVersionsGraph ssuversions = ssa.GetSsuversions(); VarVersionNode varnode = ssuversions.nodes.GetWithKey(var); HashSet <VarVersionNode> setVisited = new HashSet <VarVersionNode>(); HashSet <VarVersionNode> setNotDoms = new HashSet <VarVersionNode>(); LinkedList <VarVersionNode> stack = new LinkedList <VarVersionNode>(); stack.AddLast(varnode); while (!(stack.Count == 0)) { VarVersionNode nd = stack.RemoveAtReturningValue(0); setVisited.Add(nd); if (nd != varnode && (nd.flags & VarVersionNode.Flag_Phantom_Finexit) == 0) { res.Add(nd); } foreach (VarVersionEdge edge in nd.succs) { VarVersionNode succ = edge.dest; if (!setVisited.Contains(edge.dest)) { bool isDominated = true; foreach (VarVersionEdge prededge in succ.preds) { if (!setVisited.Contains(prededge.source)) { isDominated = false; break; } } if (isDominated) { stack.AddLast(succ); } else { setNotDoms.Add(succ); } } } } setNotDoms.RemoveAll(setVisited); return(!(setNotDoms.Count == 0)); }
public static bool CheckStatementExceptions <_T0>(List <_T0> lst) where _T0 : Statement { HashSet <Statement> all = new HashSet <Statement>(lst); HashSet <Statement> handlers = new HashSet <Statement>(); HashSet <Statement> intersection = null; foreach (var stat in lst) { HashSet <Statement> setNew = stat.GetNeighboursSet(StatEdge.Type_Exception, Statement .Direction_Forward); if (intersection == null) { intersection = setNew; } else { HashSet <Statement> interclone = new HashSet <Statement>(intersection); interclone.RemoveAll(setNew); intersection.RetainAll(setNew); setNew.RemoveAll(intersection); Sharpen.Collections.AddAll(handlers, interclone); Sharpen.Collections.AddAll(handlers, setNew); } } foreach (Statement stat in handlers) { if (!all.Contains(stat) || !(stat.GetNeighbours(StatEdge.Type_Exception , Statement.Direction_Backward).All(all.Contains))) { return(false); } } // check for other handlers (excluding head) for (int i = 1; i < lst.Count; i++) { var stat = lst[i]; if (!(stat.GetPredecessorEdges(StatEdge.Type_Exception).Count == 0) && !handlers. Contains(stat)) { return(false); } } return(true); }
private void AdjustOverlappingRanges(Range newRange, IEnumerable<Range> overlapping)
{
    if (overlapping.Count() == 1)
    {
        var singleOverlap = overlapping.First();
        var updatedRange = singleOverlap.Merge(newRange);
        ranges.Remove(singleOverlap);
        ranges.Add(updatedRange);
    }
    else
    {
        var updatedRange = newRange.Merge(overlapping);
        ranges.RemoveAll(overlapping);
        ranges.Add(updatedRange);
    }
}
private bool TryGetNextBestItemToReportFromOpenedFiles_NoLock(out ValueTuple <object, VisualStudioTaskItem[]> bestItemToReport) { bestItemToReport = default(ValueTuple <object, VisualStudioTaskItem[]>); if (!this.HasPendingOpenDocumentTaskItemsToReport) { return(false); } _inProcessSet.Clear(); var result = false; foreach (var openedDocumentId in _openedFiles) { if (!_workspace.CurrentSolution.ContainsDocument(openedDocumentId)) { _notReportedDocumentItemMap.Remove(openedDocumentId); } if (TryGetNextBestItemToReport_NoLock(_notReportedDocumentItemMap, openedDocumentId, out bestItemToReport)) { result = true; break; } _inProcessSet.Add(openedDocumentId); } if (_inProcessSet.Count > 0) { _openedFiles.RemoveAll(_inProcessSet.OfType <DocumentId>()); _inProcessSet.Clear(); } if (!this.HasPendingTaskItemsToReport) { _openedFiles.Clear(); } return(result); }
/// <summary>Creates a MiniKdc.</summary> /// <param name="conf">MiniKdc configuration.</param> /// <param name="workDir"> /// working directory, it should be the build directory. Under /// this directory an ApacheDS working directory will be created, this /// directory will be deleted when the MiniKdc stops. /// </param> /// <exception cref="System.Exception">thrown if the MiniKdc could not be created.</exception> public MiniKdc(Properties conf, FilePath workDir) { if (!conf.Keys.ContainsAll(Properties)) { ICollection <string> missingProperties = new HashSet <string>(Properties); missingProperties.RemoveAll(conf.Keys); throw new ArgumentException("Missing configuration properties: " + missingProperties ); } this.workDir = new FilePath(workDir, System.Convert.ToString(Runtime.CurrentTimeMillis ())); if (!workDir.Exists() && !workDir.Mkdirs()) { throw new RuntimeException("Cannot create directory " + workDir); } Log.Info("Configuration:"); Log.Info("---------------------------------------------------------------"); foreach (KeyValuePair <object, object> entry in conf) { Log.Info(" {}: {}", entry.Key, entry.Value); } Log.Info("---------------------------------------------------------------"); this.conf = conf; port = System.Convert.ToInt32(conf.GetProperty(KdcPort)); if (port == 0) { Socket ss = Extensions.CreateServerSocket(0, 1, Extensions.GetAddressByName (conf.GetProperty(KdcBindAddress))); port = ss.GetLocalPort(); ss.Close(); } string orgName = conf.GetProperty(OrgName); string orgDomain = conf.GetProperty(OrgDomain); realm = orgName.ToUpper(Extensions.GetEnglishCulture()) + "." + orgDomain .ToUpper(Extensions.GetEnglishCulture()); }
private FinallyProcessor.Area CompareSubgraphsEx(ControlFlowGraph graph, BasicBlock startSample, HashSet <BasicBlock> catchBlocks, BasicBlock startCatch, int finallytype , Dictionary <BasicBlock, bool> mapLast, bool skippedFirst) { // TODO: correct handling (merging) of multiple paths List <_T1926163957> stack = new List <_T1926163957>(new LinkedList <_T1926163957>()); HashSet <BasicBlock> setSample = new HashSet <BasicBlock>(); Dictionary <string, BasicBlock[]> mapNext = new Dictionary <string, BasicBlock[]>( ); stack.Add(new _T1926163957(this, startCatch, startSample, new List <int[]>())); while (!(stack.Count == 0)) { _T1926163957 entry = stack.RemoveAtReturningValue(0); BasicBlock blockCatch = entry.blockCatch; BasicBlock blockSample = entry.blockSample; bool isFirstBlock = !skippedFirst && blockCatch == startCatch; bool isLastBlock = mapLast.ContainsKey(blockCatch); bool isTrueLastBlock = isLastBlock && (mapLast.GetOrNullable(blockCatch) ?? false); if (!CompareBasicBlocksEx(graph, blockCatch, blockSample, (isFirstBlock ? 1 : 0) | (isTrueLastBlock ? 2 : 0), finallytype, entry.lstStoreVars)) { return(null); } if (blockSample.GetSuccs().Count != blockCatch.GetSuccs().Count) { return(null); } setSample.Add(blockSample); // direct successors for (int i = 0; i < blockCatch.GetSuccs().Count; i++) { BasicBlock sucCatch = blockCatch.GetSuccs()[i]; BasicBlock sucSample = blockSample.GetSuccs()[i]; if (catchBlocks.Contains(sucCatch) && !setSample.Contains(sucSample)) { stack.Add(new _T1926163957(this, sucCatch, sucSample, entry.lstStoreVars)); } } // exception successors if (isLastBlock && blockSample.GetSeq().IsEmpty()) { } else if (blockCatch.GetSuccExceptions().Count == blockSample.GetSuccExceptions(). Count) { // do nothing, blockSample will be removed anyway for (int i = 0; i < blockCatch.GetSuccExceptions().Count; i++) { BasicBlock sucCatch = blockCatch.GetSuccExceptions()[i]; BasicBlock sucSample = blockSample.GetSuccExceptions()[i]; string excCatch = graph.GetExceptionRange(sucCatch, blockCatch).GetUniqueExceptionsString (); string excSample = graph.GetExceptionRange(sucSample, blockSample).GetUniqueExceptionsString (); // FIXME: compare handlers if possible bool equalexc = excCatch == null ? excSample == null : excCatch.Equals(excSample); if (equalexc) { if (catchBlocks.Contains(sucCatch) && !setSample.Contains(sucSample)) { List <int[]> lst = entry.lstStoreVars; if (sucCatch.GetSeq().Length() > 0 && sucSample.GetSeq().Length() > 0) { Instruction instrCatch = sucCatch.GetSeq().GetInstr(0); Instruction instrSample = sucSample.GetSeq().GetInstr(0); if (instrCatch.opcode == ICodeConstants.opc_astore && instrSample.opcode == ICodeConstants .opc_astore) { lst = new List <int[]>(lst); lst.Add(new int[] { instrCatch.Operand(0), instrSample.Operand(0) }); } } stack.Add(new _T1926163957(this, sucCatch, sucSample, lst)); } } else { return(null); } } } else { return(null); } if (isLastBlock) { HashSet <BasicBlock> setSuccs = new HashSet <BasicBlock>(blockSample.GetSuccs()); setSuccs.RemoveAll(setSample); foreach (_T1926163957 stackent in stack) { setSuccs.Remove(stackent.blockSample); } foreach (BasicBlock succ in setSuccs) { if (graph.GetLast() != succ) { // FIXME: why? Sharpen.Collections.Put(mapNext, blockSample.id + "#" + succ.id, new BasicBlock[] { blockSample, succ, isTrueLastBlock ? succ : null }); } } } } return(new FinallyProcessor.Area(startSample, setSample, GetUniqueNext(graph, new HashSet <BasicBlock[]>(mapNext.Values)))); }
internal static Boolean PurgeRevisionsTask(Database enclosingDatabase, IDictionary<String, IList<String>> docsToRevs, IDictionary<String, Object> result) { foreach (string docID in docsToRevs.Keys) { long docNumericID = enclosingDatabase.GetDocNumericID(docID); if (docNumericID == -1) { continue; } var revsPurged = new AList<string>(); var revIDs = docsToRevs [docID]; if (revIDs == null) { return false; } else { if (revIDs.Count == 0) { revsPurged = new AList<string>(); } else { if (revIDs.Contains("*")) { try { var args = new[] { Convert.ToString(docNumericID) }; enclosingDatabase.StorageEngine.ExecSQL("DELETE FROM revs WHERE doc_id=?", args); } catch (SQLException e) { Log.E(Tag, "Error deleting revisions", e); return false; } revsPurged = new AList<string>(); revsPurged.AddItem("*"); } else { Cursor cursor = null; try { var args = new [] { Convert.ToString(docNumericID) }; var queryString = "SELECT revid, sequence, parent FROM revs WHERE doc_id=? ORDER BY sequence DESC"; cursor = enclosingDatabase.StorageEngine.RawQuery(queryString, args); if (!cursor.MoveToNext()) { Log.W(Tag, "No results for query: " + queryString); return false; } var seqsToPurge = new HashSet<long>(); var seqsToKeep = new HashSet<long>(); var revsToPurge = new HashSet<string>(); while (!cursor.IsAfterLast()) { string revID = cursor.GetString(0); long sequence = cursor.GetLong(1); long parent = cursor.GetLong(2); if (seqsToPurge.Contains(sequence) || revIDs.Contains(revID) && !seqsToKeep.Contains (sequence)) { seqsToPurge.AddItem(sequence); revsToPurge.AddItem(revID); if (parent > 0) { seqsToPurge.AddItem(parent); } } else { seqsToPurge.Remove(sequence); revsToPurge.Remove(revID); seqsToKeep.AddItem(parent); } cursor.MoveToNext(); } seqsToPurge.RemoveAll(seqsToKeep); Log.I(Tag, String.Format("Purging doc '{0}' revs ({1}); asked for ({2})", docID, revsToPurge, revIDs)); if (seqsToPurge.Count > 0) { string seqsToPurgeList = String.Join(",", seqsToPurge); string sql = string.Format("DELETE FROM revs WHERE sequence in ({0})", seqsToPurgeList); try { enclosingDatabase.StorageEngine.ExecSQL(sql); } catch (SQLException e) { Log.E(Tag, "Error deleting revisions via: " + sql, e); return false; } } Collections.AddAll(revsPurged, revsToPurge); } catch (SQLException e) { Log.E(Tag, "Error getting revisions", e); return false; } finally { if (cursor != null) { cursor.Close(); } } } } } result[docID] = revsPurged; } return true; }
private bool VerifyFinallyEx(ControlFlowGraph graph, CatchAllStatement fstat, FinallyProcessor.Record information) { HashSet <BasicBlock> tryBlocks = GetAllBasicBlocks(fstat.GetFirst()); HashSet <BasicBlock> catchBlocks = GetAllBasicBlocks(fstat.GetHandler()); int finallytype = information.firstCode; Dictionary <BasicBlock, bool> mapLast = information.mapLast; BasicBlock first = fstat.GetHandler().GetBasichead().GetBlock(); bool skippedFirst = false; if (finallytype == 3) { // empty finally RemoveExceptionInstructionsEx(first, 3, finallytype); if (mapLast.ContainsKey(first)) { graph.GetFinallyExits().Add(first); } return(true); } else if (first.GetSeq().Length() == 1 && finallytype > 0) { BasicBlock firstsuc = first.GetSuccs()[0]; if (catchBlocks.Contains(firstsuc)) { first = firstsuc; skippedFirst = true; } } // identify start blocks HashSet <BasicBlock> startBlocks = new HashSet <BasicBlock>(); foreach (BasicBlock block in tryBlocks) { Sharpen.Collections.AddAll(startBlocks, block.GetSuccs()); } // throw in the try body will point directly to the dummy exit // so remove dummy exit startBlocks.Remove(graph.GetLast()); startBlocks.RemoveAll(tryBlocks); List <FinallyProcessor.Area> lstAreas = new List <FinallyProcessor.Area>(); foreach (BasicBlock start in startBlocks) { FinallyProcessor.Area arr = CompareSubgraphsEx(graph, start, catchBlocks, first, finallytype, mapLast, skippedFirst); if (arr == null) { return(false); } lstAreas.Add(arr); } // try { // DotExporter.toDotFile(graph, new File("c:\\Temp\\fern5.dot"), true); // } catch(Exception ex){ex.printStackTrace();} // delete areas foreach (FinallyProcessor.Area area in lstAreas) { DeleteArea(graph, area); } // try { // DotExporter.toDotFile(graph, new File("c:\\Temp\\fern5.dot"), true); // } catch(Exception ex){ex.printStackTrace();} // INFO: empty basic blocks may remain in the graph! foreach (KeyValuePair <BasicBlock, bool> entry in mapLast) { BasicBlock last = entry.Key; if (entry.Value) { RemoveExceptionInstructionsEx(last, 2, finallytype); graph.GetFinallyExits().Add(last); } } RemoveExceptionInstructionsEx(fstat.GetHandler().GetBasichead().GetBlock(), 1, finallytype ); return(true); }
public DataFlowOpInitializeResult Initialize(DataFlowOpInitializateContext context) { _initialDelayMSec = (long)(initialDelay * 1000); _periodDelayMSec = (long)(interval * 1000); if (context.OutputPorts.Count != 1) { throw new ArgumentException("BeaconSource operator requires one output stream but produces " + context.OutputPorts.Count + " streams"); } // Check if a type is declared DataFlowOpOutputPort port = context.OutputPorts[0]; if (port.OptionalDeclaredType != null && port.OptionalDeclaredType.EventType != null) { EventType outputEventType = port.OptionalDeclaredType.EventType; _produceEventBean = port.OptionalDeclaredType != null && !port.OptionalDeclaredType.IsUnderlying; // compile properties to populate ISet <String> props = new HashSet <string>(_allProperties.Keys); props.RemoveAll(PARAMETER_PROPERTIES); WriteablePropertyDescriptor[] writables = SetupProperties(props.ToArray(), outputEventType, context.StatementContext); _manufacturer = context.ServicesContext.EventAdapterService.GetManufacturer(outputEventType, writables, context.ServicesContext.EngineImportService, false); int index = 0; _evaluators = new ExprEvaluator[writables.Length]; foreach (WriteablePropertyDescriptor writeable in writables) { var providedProperty = _allProperties.Get(writeable.PropertyName); if (providedProperty is ExprNode) { var exprNode = (ExprNode)providedProperty; var validated = ExprNodeUtility.ValidateSimpleGetSubtree( ExprNodeOrigin.DATAFLOWBEACON, exprNode, context.StatementContext, null, false); var exprEvaluator = validated.ExprEvaluator; var widener = TypeWidenerFactory.GetCheckPropertyAssignType( ExprNodeUtility.ToExpressionStringMinPrecedenceSafe(validated), exprEvaluator.ReturnType, writeable.PropertyType, writeable.PropertyName); if (widener != null) { _evaluators[index] = new ProxyExprEvaluator { ProcEvaluate = evaluateParams => widener.Invoke(exprEvaluator.Evaluate(evaluateParams)), ReturnType = null }; } else { _evaluators[index] = exprEvaluator; } } else if (providedProperty == null) { _evaluators[index] = new ProxyExprEvaluator { ProcEvaluate = evaluateParams => null, ReturnType = null }; } else { _evaluators[index] = new ProxyExprEvaluator { ProcEvaluate = evaluateParams => providedProperty, ReturnType = providedProperty.GetType() }; } index++; } return(null); // no changing types } // No type has been declared, we can create one String anonymousTypeName = context.DataflowName + "-beacon"; var types = new LinkedHashMap <String, Object>(); ICollection <String> kprops = _allProperties.Keys; kprops.RemoveAll(PARAMETER_PROPERTIES); int count = 0; _evaluators = new ExprEvaluator[kprops.Count]; foreach (String propertyName in kprops) { var exprNode = (ExprNode)_allProperties.Get(propertyName); var validated = ExprNodeUtility.ValidateSimpleGetSubtree(ExprNodeOrigin.DATAFLOWBEACON, exprNode, context.StatementContext, null, false); var value = validated.ExprEvaluator.Evaluate(new EvaluateParams(null, true, context.AgentInstanceContext)); if (value == null) { types.Put(propertyName, null); } else { types.Put(propertyName, value.GetType()); } _evaluators[count] = new ProxyExprEvaluator { ProcEvaluate = evaluateParams => value, ReturnType = null }; count++; } EventType type = context.ServicesContext.EventAdapterService.CreateAnonymousObjectArrayType(anonymousTypeName, types); return(new DataFlowOpInitializeResult(new GraphTypeDesc[] { new GraphTypeDesc(false, true, type) })); }
/// <summary> /// The main class will run all the test files that are *not* covered in /// the *.tests files, and print out a list of all the tests that pass. /// </summary> /// <remarks> /// The main class will run all the test files that are *not* covered in /// the *.tests files, and print out a list of all the tests that pass. /// </remarks> /// <exception cref="System.IO.IOException"></exception> public static void Main(string[] args) { TextWriter @out = new TextWriter("fix-tests-files.sh"); try { for (int i = 0; i < OPT_LEVELS.Length; i++) { int optLevel = OPT_LEVELS[i]; FilePath testDir = GetTestDir(); FilePath[] allTests = TestUtils.RecursiveListFiles(testDir, new _FileFilter_204()); HashSet<FilePath> diff = new HashSet<FilePath>(Arrays.AsList(allTests)); FilePath[] testFiles = GetTestFiles(optLevel); diff.RemoveAll(Arrays.AsList(testFiles)); List<string> skippedPassed = new List<string>(); int absolutePathLength = testDir.GetAbsolutePath().Length + 1; foreach (FilePath testFile in diff) { try { (new MozillaSuiteTest(testFile, optLevel)).RunMozillaTest(); // strip off testDir string canonicalized = Sharpen.Runtime.Substring(testFile.GetAbsolutePath(), absolutePathLength); canonicalized = canonicalized.Replace('\\', '/'); skippedPassed.Add(canonicalized); } catch { } } // failed, so skip // "skippedPassed" now contains all the tests that are currently // skipped but now pass. Print out shell commands to update the // appropriate *.tests file. if (skippedPassed.Count > 0) { @out.WriteLine("cat >> " + GetTestFilename(optLevel) + " <<EOF"); string[] sorted = Sharpen.Collections.ToArray(skippedPassed, new string[0]); Arrays.Sort(sorted); for (int j = 0; j < sorted.Length; j++) { @out.WriteLine(sorted[j]); } @out.WriteLine("EOF"); } } System.Console.Out.WriteLine("Done."); } finally { @out.Close(); } }
/// <summary> /// Processes the specified achievement type cache for the source entity. /// </summary> /// <param name="rockContext">The rock context.</param> /// <param name="achievementTypeCache">The achievement type cache.</param> /// <param name="sourceEntity">The source entity.</param> /// <returns>The set of attempts that were created or updated</returns> public override HashSet <AchievementAttempt> Process(RockContext rockContext, AchievementTypeCache achievementTypeCache, IEntity sourceEntity) { var updatedAttempts = new HashSet <AchievementAttempt>(); // If we cannot link the transaction to a person, then there is nothing to do if (!(sourceEntity is FinancialTransaction financialTransaction)) { return(updatedAttempts); } // If the achievement type is not active (or null) then there is nothing to do if (achievementTypeCache?.IsActive != true) { return(updatedAttempts); } // If there are unmet prerequisites, then there is nothing to do var achievementTypeService = new AchievementTypeService(rockContext); var unmetPrerequisites = achievementTypeService.GetUnmetPrerequisites(achievementTypeCache.Id, financialTransaction.AuthorizedPersonAliasId.Value); if (unmetPrerequisites.Any()) { return(updatedAttempts); } // If the transaction is a refund, the person is empty, or less than zero amount, then there is nothing to do. if (null != financialTransaction.RefundDetails || !financialTransaction.AuthorizedPersonAliasId.HasValue || financialTransaction.AuthorizedPersonAliasId == 0 || financialTransaction.TotalAmount <= 0M) { return(updatedAttempts); } // Get all of the attempts for this interaction and achievement combo, ordered by start date DESC so that // the most recent attempts can be found with FirstOrDefault var achievementAttemptService = new AchievementAttemptService(rockContext); var attempts = achievementAttemptService.GetOrderedAchieverAttempts(achievementAttemptService.Queryable(), achievementTypeCache, financialTransaction.AuthorizedPersonAliasId.Value); var mostRecentSuccess = attempts.FirstOrDefault(saa => saa.AchievementAttemptEndDateTime.HasValue && saa.IsSuccessful); var overachievementPossible = achievementTypeCache.AllowOverAchievement; var successfulAttemptCount = attempts.Count(saa => saa.IsSuccessful); var maxSuccessesAllowed = achievementTypeCache.MaxAccomplishmentsAllowed ?? int.MaxValue; // If the most recent success is still open and overachievement is allowed, then update it if (overachievementPossible && mostRecentSuccess != null && !mostRecentSuccess.IsClosed) { UpdateOpenAttempt(mostRecentSuccess, achievementTypeCache, financialTransaction); updatedAttempts.Add(mostRecentSuccess); if (!mostRecentSuccess.IsClosed) { // New records can only be created once the open records are all closed return(updatedAttempts); } } // If the success count limit has been reached, then no more processing should be done if (successfulAttemptCount >= maxSuccessesAllowed) { return(updatedAttempts); } // Everything after the most recent success is on the table for deletion. Successes should not be // deleted. Everything after a success might be recalculated because of data changes. // Try to reuse these attempts if they match for continuity, but if the start date is changed, they // get deleted. 
var attemptsToDelete = attempts; if (mostRecentSuccess != null) { attemptsToDelete = attemptsToDelete .Where(saa => saa.AchievementAttemptStartDateTime > mostRecentSuccess.AchievementAttemptStartDateTime) .ToList(); } var newAttempts = CreateNewAttempts(achievementTypeCache, financialTransaction, mostRecentSuccess); if (newAttempts != null && newAttempts.Any()) { newAttempts = newAttempts.OrderBy(saa => saa.AchievementAttemptStartDateTime).ToList(); foreach (var newAttempt in newAttempts) { // Keep the old attempt if possible, otherwise add a new one var existingAttempt = attemptsToDelete.FirstOrDefault(saa => saa.AchievementAttemptStartDateTime == newAttempt.AchievementAttemptStartDateTime); if (existingAttempt != null) { attemptsToDelete.Remove(existingAttempt); CopyAttempt(newAttempt, existingAttempt); updatedAttempts.Add(existingAttempt); } else { newAttempt.AchieverEntityId = financialTransaction.AuthorizedPersonAliasId.Value; newAttempt.AchievementTypeId = achievementTypeCache.Id; achievementAttemptService.Add(newAttempt); updatedAttempts.Add(newAttempt); } // If this attempt was successful then make re-check the max success limit if (newAttempt.IsSuccessful) { successfulAttemptCount++; if (successfulAttemptCount >= maxSuccessesAllowed && !overachievementPossible) { break; } } } } if (attemptsToDelete.Any()) { updatedAttempts.RemoveAll(attemptsToDelete); achievementAttemptService.DeleteRange(attemptsToDelete); } return(updatedAttempts); }
public bool Run() { foreach (string docID in docsToRevs.Keys) { long docNumericID = this._enclosing.GetDocNumericID(docID); if (docNumericID == -1) { continue; } IList<string> revsPurged = new AList<string>(); IList<string> revIDs = (IList<string>)docsToRevs.Get(docID); if (revIDs == null) { return false; } else { if (revIDs.Count == 0) { revsPurged = new AList<string>(); } else { if (revIDs.Contains("*")) { try { string[] args = new string[] { System.Convert.ToString(docNumericID) }; this._enclosing.database.ExecSQL("DELETE FROM revs WHERE doc_id=?", args); } catch (SQLException e) { Log.E(Database.Tag, "Error deleting revisions", e); return false; } revsPurged = new AList<string>(); revsPurged.AddItem("*"); } else { Cursor cursor = null; try { string[] args = new string[] { System.Convert.ToString(docNumericID) }; string queryString = "SELECT revid, sequence, parent FROM revs WHERE doc_id=? ORDER BY sequence DESC"; cursor = this._enclosing.database.RawQuery(queryString, args); if (!cursor.MoveToNext()) { Log.W(Database.Tag, "No results for query: " + queryString); return false; } ICollection<long> seqsToPurge = new HashSet<long>(); ICollection<long> seqsToKeep = new HashSet<long>(); ICollection<string> revsToPurge = new HashSet<string>(); while (!cursor.IsAfterLast()) { string revID = cursor.GetString(0); long sequence = cursor.GetLong(1); long parent = cursor.GetLong(2); if (seqsToPurge.Contains(sequence) || revIDs.Contains(revID) && !seqsToKeep.Contains (sequence)) { seqsToPurge.AddItem(sequence); revsToPurge.AddItem(revID); if (parent > 0) { seqsToPurge.AddItem(parent); } } else { seqsToPurge.Remove(sequence); revsToPurge.Remove(revID); seqsToKeep.AddItem(parent); } cursor.MoveToNext(); } seqsToPurge.RemoveAll(seqsToKeep); Log.I(Database.Tag, string.Format("Purging doc '%s' revs (%s); asked for (%s)", docID , revsToPurge, revIDs)); if (seqsToPurge.Count > 0) { string seqsToPurgeList = TextUtils.Join(",", seqsToPurge); string sql = string.Format("DELETE FROM revs WHERE sequence in (%s)", seqsToPurgeList ); try { this._enclosing.database.ExecSQL(sql); } catch (SQLException e) { Log.E(Database.Tag, "Error deleting revisions via: " + sql, e); return false; } } Sharpen.Collections.AddAll(revsPurged, revsToPurge); } catch (SQLException e) { Log.E(Database.Tag, "Error getting revisions", e); return false; } finally { if (cursor != null) { cursor.Close(); } } } } } result.Put(docID, revsPurged); } return true; }
private static Statement FindGeneralStatement(Statement stat, bool forceall, Dictionary <int, HashSet <int> > mapExtPost) { VBStyleCollection <Statement, int> stats = stat.GetStats(); VBStyleCollection <List <int>, int> vbPost; if ((mapExtPost.Count == 0)) { FastExtendedPostdominanceHelper extpost = new FastExtendedPostdominanceHelper(); Sharpen.Collections.PutAll(mapExtPost, extpost.GetExtendedPostdominators(stat)); } if (forceall) { vbPost = new VBStyleCollection <List <int>, int>(); List <Statement> lstAll = stat.GetPostReversePostOrderList(); foreach (Statement st in lstAll) { HashSet <int> set = mapExtPost.GetOrNull(st.id); if (set != null) { vbPost.AddWithKey(new List <int>(set), st.id); } } // FIXME: sort order!! // tail statements HashSet <int> setFirst = mapExtPost.GetOrNull(stat.GetFirst().id); if (setFirst != null) { foreach (int id in setFirst) { List <int> lst = vbPost.GetWithKey(id); if (lst == null) { vbPost.AddWithKey(lst = new List <int>(), id); } lst.Add(id); } } } else { vbPost = CalcPostDominators(stat); } for (int k = 0; k < vbPost.Count; k++) { int headid = vbPost.GetKey(k); List <int> posts = vbPost[k]; if (!mapExtPost.ContainsKey(headid) && !(posts.Count == 1 && posts[0].Equals(headid ))) { continue; } Statement head = stats.GetWithKey(headid); HashSet <int> setExtPosts = mapExtPost.GetOrNull(headid); foreach (int postId in posts) { if (!postId.Equals(headid) && !setExtPosts.Contains(postId)) { continue; } Statement post = stats.GetWithKey(postId); if (post == null) { // possible in case of an inherited postdominance set continue; } bool same = (post == head); HashSet <Statement> setNodes = new HashSet <Statement>(); HashSet <Statement> setPreds = new HashSet <Statement>(); // collect statement nodes HashSet <Statement> setHandlers = new HashSet <Statement>(); setHandlers.Add(head); while (true) { bool hdfound = false; foreach (Statement handler in setHandlers) { if (setNodes.Contains(handler)) { continue; } bool addhd = (setNodes.Count == 0); // first handler == head if (!addhd) { List <Statement> hdsupp = handler.GetNeighbours(StatEdge.Type_Exception, Statement .Direction_Backward); addhd = (setNodes.ContainsAll(hdsupp) && (setNodes.Count > hdsupp.Count || setNodes .Count == 1)); } // strict subset if (addhd) { LinkedList <Statement> lstStack = new LinkedList <Statement>(); lstStack.AddLast(handler); while (!(lstStack.Count == 0)) { Statement st = lstStack.RemoveAtReturningValue(0); if (!(setNodes.Contains(st) || (!same && st == post))) { setNodes.Add(st); if (st != head) { // record predeccessors except for the head Sharpen.Collections.AddAll(setPreds, st.GetNeighbours(StatEdge.Type_Regular, Statement .Direction_Backward)); } // put successors on the stack Sharpen.Collections.AddAll(lstStack, st.GetNeighbours(StatEdge.Type_Regular, Statement .Direction_Forward)); // exception edges Sharpen.Collections.AddAll(setHandlers, st.GetNeighbours(StatEdge.Type_Exception, Statement.Direction_Forward)); } } hdfound = true; setHandlers.Remove(handler); break; } } if (!hdfound) { break; } } // check exception handlers setHandlers.Clear(); foreach (Statement st in setNodes) { Sharpen.Collections.AddAll(setHandlers, st.GetNeighbours(StatEdge.Type_Exception, Statement.Direction_Forward)); } setHandlers.RemoveAll(setNodes); bool excok = true; foreach (Statement handler in setHandlers) { if (!handler.GetNeighbours(StatEdge.Type_Exception, Statement.Direction_Backward) .ContainsAll(setNodes)) { excok = false; break; } } // build statement and return if (excok) { Statement res; 
setPreds.RemoveAll(setNodes); if (setPreds.Count == 0) { if ((setNodes.Count > 1 || head.GetNeighbours(StatEdge.Type_Regular, Statement.Direction_Backward ).Contains(head)) && setNodes.Count < stats.Count) { if (CheckSynchronizedCompleteness(setNodes)) { res = new GeneralStatement(head, setNodes, same ? null : post); stat.CollapseNodesToStatement(res); return(res); } } } } } } return(null); }
public void Remove(params GameObject[] gameObjects) => Encompassed.RemoveAll(gameObjects);
public override void Dispose() { lock (this) { // files that we tried to delete, but couldn't because readers were open. // all that matters is that we tried! (they will eventually go away) ISet<string> pendingDeletions = new HashSet<string>(OpenFilesDeleted); MaybeYield(); if (OpenFiles == null) { OpenFiles = new Dictionary<string, int>(); OpenFilesDeleted = new HashSet<string>(); } if (OpenFiles.Count > 0) { // print the first one as its very verbose otherwise Exception cause = null; IEnumerator<Exception> stacktraces = OpenFileHandles.Values.GetEnumerator(); if (stacktraces.MoveNext()) { cause = stacktraces.Current; } // RuntimeException instead ofSystem.IO.IOException because // super() does not throwSystem.IO.IOException currently: throw new Exception("MockDirectoryWrapper: cannot close: there are still open files: " + String.Join(" ,", OpenFiles.ToArray().Select(x => x.Key)), cause); } if (OpenLocks.Count > 0) { throw new Exception("MockDirectoryWrapper: cannot close: there are still open locks: " + String.Join(" ,", OpenLocks.ToArray())); } IsOpen = false; if (CheckIndexOnClose) { RandomIOExceptionRate_Renamed = 0.0; RandomIOExceptionRateOnOpen_Renamed = 0.0; if (DirectoryReader.IndexExists(this)) { if (LuceneTestCase.VERBOSE) { Console.WriteLine("\nNOTE: MockDirectoryWrapper: now crush"); } Crash(); // corrupt any unsynced-files if (LuceneTestCase.VERBOSE) { Console.WriteLine("\nNOTE: MockDirectoryWrapper: now run CheckIndex"); } TestUtil.CheckIndex(this, CrossCheckTermVectorsOnClose); // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles if (AssertNoUnreferencedFilesOnClose) { // now look for unreferenced files: discount ones that we tried to delete but could not HashSet<string> allFiles = new HashSet<string>(Arrays.AsList(ListAll())); allFiles.RemoveAll(pendingDeletions); string[] startFiles = allFiles.ToArray(/*new string[0]*/); IndexWriterConfig iwc = new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null); iwc.SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); (new IndexWriter(@in, iwc)).Rollback(); string[] endFiles = @in.ListAll(); ISet<string> startSet = new SortedSet<string>(Arrays.AsList(startFiles)); ISet<string> endSet = new SortedSet<string>(Arrays.AsList(endFiles)); if (pendingDeletions.Contains("segments.gen") && endSet.Contains("segments.gen")) { // this is possible if we hit an exception while writing segments.gen, we try to delete it // and it ends out in pendingDeletions (but IFD wont remove this). startSet.Add("segments.gen"); if (LuceneTestCase.VERBOSE) { Console.WriteLine("MDW: Unreferenced check: Ignoring segments.gen that we could not delete."); } } // its possible we cannot delete the segments_N on windows if someone has it open and // maybe other files too, depending on timing. normally someone on windows wouldnt have // an issue (IFD would nuke this stuff eventually), but we pass NoDeletionPolicy... 
foreach (string file in pendingDeletions) { if (file.StartsWith("segments") && !file.Equals("segments.gen") && endSet.Contains(file)) { startSet.Add(file); if (LuceneTestCase.VERBOSE) { Console.WriteLine("MDW: Unreferenced check: Ignoring segments file: " + file + " that we could not delete."); } SegmentInfos sis = new SegmentInfos(); try { sis.Read(@in, file); } catch (System.IO.IOException ioe) { // OK: likely some of the .si files were deleted } try { ISet<string> ghosts = new HashSet<string>(sis.Files(@in, false)); foreach (string s in ghosts) { if (endSet.Contains(s) && !startSet.Contains(s)) { Debug.Assert(pendingDeletions.Contains(s)); if (LuceneTestCase.VERBOSE) { Console.WriteLine("MDW: Unreferenced check: Ignoring referenced file: " + s + " " + "from " + file + " that we could not delete."); } startSet.Add(s); } } } catch (Exception t) { Console.Error.WriteLine("ERROR processing leftover segments file " + file + ":"); Console.WriteLine(t.ToString()); Console.Write(t.StackTrace); } } } startFiles = startSet.ToArray(/*new string[0]*/); endFiles = endSet.ToArray(/*new string[0]*/); if (!Arrays.Equals(startFiles, endFiles)) { IList<string> removed = new List<string>(); foreach (string fileName in startFiles) { if (!endSet.Contains(fileName)) { removed.Add(fileName); } } IList<string> added = new List<string>(); foreach (string fileName in endFiles) { if (!startSet.Contains(fileName)) { added.Add(fileName); } } string extras; if (removed.Count != 0) { extras = "\n\nThese files were removed: " + removed; } else { extras = ""; } if (added.Count != 0) { extras += "\n\nThese files were added (waaaaaaaaaat!): " + added; } if (pendingDeletions.Count != 0) { extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions; } Debug.Assert(false, "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); } DirectoryReader ir1 = DirectoryReader.Open(this); int numDocs1 = ir1.NumDocs; ir1.Dispose(); (new IndexWriter(this, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null))).Dispose(); DirectoryReader ir2 = DirectoryReader.Open(this); int numDocs2 = ir2.NumDocs; ir2.Dispose(); Debug.Assert(numDocs1 == numDocs2, "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); } } } @in.Dispose(); } }