/// <summary>
/// Adds import directives for the given namespaces to the document's syntax root,
/// skipping any namespace the document already imports. Each added import carries
/// the Simplifier annotation so later passes can reduce it. Returns the (possibly
/// unchanged) root; the original root is returned when there is nothing to add.
/// </summary>
private async Task<SyntaxNode> AddNamespaceImportsAsync(
    Document document,
    SemanticModel model,
    OptionSet options,
    IEnumerable<INamespaceSymbol> namespaces,
    CancellationToken cancellationToken)
{
    // Namespaces the document already imports; used to filter out duplicates.
    var alreadyImported = new HashSet<INamespaceSymbol>();
    await this.GetExistingImportedNamespacesAsync(document, model, alreadyImported, cancellationToken).ConfigureAwait(false);

    var missing = new HashSet<INamespaceSymbol>(namespaces);
    missing.RemoveAll(alreadyImported);

    var root = await model.SyntaxTree.GetRootAsync(cancellationToken).ConfigureAwait(false);
    if (missing.Count == 0)
    {
        return root;
    }

    var generator = SyntaxGenerator.GetGenerator(document);
    var updatedRoot = root;
    foreach (var ns in missing)
    {
        var importDirective = generator
            .NamespaceImportDeclaration(ns.ToDisplayString())
            .WithAdditionalAnnotations(Simplifier.Annotation);
        updatedRoot = this.InsertNamespaceImport(updatedRoot, generator, importDirective, options);
    }

    return updatedRoot;
}
/// <summary>
/// Synchronous counterpart of AddNamespaceImportsAsync: adds import directives
/// for the given namespaces to the document's syntax root, skipping namespaces
/// that are already imported. Added imports carry the Simplifier annotation.
/// Returns the original root unchanged when there is nothing to add.
/// </summary>
private SyntaxNode AddNamespaceImports(
    Document document,
    SemanticModel model,
    OptionSet options,
    IEnumerable<INamespaceSymbol> namespaces)
{
    // Namespaces the document already imports; used to filter out duplicates.
    var alreadyImported = new HashSet<INamespaceSymbol>();
    this.GetExistingImportedNamespaces(document, model, alreadyImported);

    var missing = new HashSet<INamespaceSymbol>(namespaces);
    missing.RemoveAll(alreadyImported);

    var root = model.SyntaxTree.GetRoot();
    if (missing.Count == 0)
    {
        return root;
    }

    var generator = SyntaxGenerator.GetGenerator(document);
    var updatedRoot = root;
    foreach (var ns in missing)
    {
        var importDirective = generator
            .NamespaceImportDeclaration(ns.ToDisplayString())
            .WithAdditionalAnnotations(Simplifier.Annotation);
        updatedRoot = this.InsertNamespaceImport(updatedRoot, generator, importDirective, options);
    }

    return updatedRoot;
}
/// <summary>
/// Purges the given revisions of the given documents from the revs table.
/// For each document: a revision list containing "*" purges every revision;
/// an empty list purges nothing; otherwise the listed revision IDs (and any
/// ancestor sequences not needed by a surviving revision) are deleted.
/// The revisions actually purged per document are recorded in
/// <paramref name="result"/>, keyed by document ID.
/// </summary>
/// <returns>true on success; false on any SQL error or unknown input.</returns>
internal static Boolean PurgeRevisionsTask(Database enclosingDatabase, IDictionary<String, IList<String>> docsToRevs, IDictionary<String, Object> result)
{
    foreach (string docID in docsToRevs.Keys)
    {
        long docNumericID = enclosingDatabase.GetDocNumericID(docID);
        if (docNumericID == -1)
        {
            // Document is unknown to this database; nothing to purge.
            continue;
        }
        var revsPurged = new AList<string>();
        var revIDs = docsToRevs[docID];
        if (revIDs == null)
        {
            return false;
        }
        else if (revIDs.Count == 0)
        {
            // Nothing requested for this doc; report an empty purge list.
            revsPurged = new AList<string>();
        }
        else if (revIDs.Contains("*"))
        {
            // "*" means delete every revision of this document.
            try
            {
                var args = new[] { Convert.ToString(docNumericID) };
                enclosingDatabase.StorageEngine.ExecSQL("DELETE FROM revs WHERE doc_id=?", args);
            }
            catch (SQLException e)
            {
                Log.E(Tag, "Error deleting revisions", e);
                return false;
            }
            revsPurged = new AList<string>();
            revsPurged.AddItem("*");
        }
        else
        {
            // Walk the revision tree newest-to-oldest, collecting sequences to
            // purge and sequences that must be kept because a surviving
            // revision depends on them.
            Cursor cursor = null;
            try
            {
                var args = new[] { Convert.ToString(docNumericID) };
                var queryString = "SELECT revid, sequence, parent FROM revs WHERE doc_id=? ORDER BY sequence DESC";
                cursor = enclosingDatabase.StorageEngine.RawQuery(queryString, args);
                if (!cursor.MoveToNext())
                {
                    Log.W(Tag, "No results for query: " + queryString);
                    return false;
                }
                var seqsToPurge = new HashSet<long>();
                var seqsToKeep = new HashSet<long>();
                var revsToPurge = new HashSet<string>();
                while (!cursor.IsAfterLast())
                {
                    string revID = cursor.GetString(0);
                    long sequence = cursor.GetLong(1);
                    long parent = cursor.GetLong(2);
                    // Purge this row if its sequence was already marked (as a purged
                    // child's ancestor) or it was explicitly requested and no
                    // surviving revision needs it.
                    if (seqsToPurge.Contains(sequence) || revIDs.Contains(revID) && !seqsToKeep.Contains(sequence))
                    {
                        seqsToPurge.AddItem(sequence);
                        revsToPurge.AddItem(revID);
                        if (parent > 0)
                        {
                            seqsToPurge.AddItem(parent);
                        }
                    }
                    else
                    {
                        // This revision survives, so its parent must be kept too.
                        seqsToPurge.Remove(sequence);
                        revsToPurge.Remove(revID);
                        seqsToKeep.AddItem(parent);
                    }
                    cursor.MoveToNext();
                }
                seqsToPurge.RemoveAll(seqsToKeep);
                // BUGFIX: join the collections' contents for the log message;
                // formatting a HashSet/IList directly only prints its type name.
                Log.I(Tag, String.Format("Purging doc '{0}' revs ({1}); asked for ({2})", docID, String.Join(", ", revsToPurge), String.Join(", ", revIDs)));
                if (seqsToPurge.Count > 0)
                {
                    // Sequences are numeric values read from our own query, so
                    // building the IN list by join is safe here.
                    string seqsToPurgeList = String.Join(",", seqsToPurge);
                    string sql = string.Format("DELETE FROM revs WHERE sequence in ({0})", seqsToPurgeList);
                    try
                    {
                        enclosingDatabase.StorageEngine.ExecSQL(sql);
                    }
                    catch (SQLException e)
                    {
                        Log.E(Tag, "Error deleting revisions via: " + sql, e);
                        return false;
                    }
                }
                Collections.AddAll(revsPurged, revsToPurge);
            }
            catch (SQLException e)
            {
                Log.E(Tag, "Error getting revisions", e);
                return false;
            }
            finally
            {
                if (cursor != null)
                {
                    cursor.Close();
                }
            }
        }
        result[docID] = revsPurged;
    }
    return true;
}
/// <summary>
/// Purges the requested revisions of each document in docsToRevs from the
/// revs table ("*" purges all revisions of a document), recording the
/// revisions actually purged in <c>result</c> keyed by document ID.
/// </summary>
/// <returns>true on success; false on any SQL error or null revision list.</returns>
public bool Run()
{
    foreach (string docID in docsToRevs.Keys)
    {
        long docNumericID = this._enclosing.GetDocNumericID(docID);
        if (docNumericID == -1)
        {
            // Document is unknown to this database; nothing to purge.
            continue;
        }
        IList<string> revsPurged = new AList<string>();
        IList<string> revIDs = (IList<string>)docsToRevs.Get(docID);
        if (revIDs == null)
        {
            return false;
        }
        else
        {
            if (revIDs.Count == 0)
            {
                // Nothing requested for this doc; report an empty purge list.
                revsPurged = new AList<string>();
            }
            else
            {
                if (revIDs.Contains("*"))
                {
                    // "*" means delete every revision of this document.
                    try
                    {
                        string[] args = new string[] { System.Convert.ToString(docNumericID) };
                        this._enclosing.database.ExecSQL("DELETE FROM revs WHERE doc_id=?", args);
                    }
                    catch (SQLException e)
                    {
                        Log.E(Database.Tag, "Error deleting revisions", e);
                        return false;
                    }
                    revsPurged = new AList<string>();
                    revsPurged.AddItem("*");
                }
                else
                {
                    // Walk the revision tree newest-to-oldest, collecting sequences
                    // to purge and sequences that a surviving revision depends on.
                    Cursor cursor = null;
                    try
                    {
                        string[] args = new string[] { System.Convert.ToString(docNumericID) };
                        string queryString = "SELECT revid, sequence, parent FROM revs WHERE doc_id=? ORDER BY sequence DESC";
                        cursor = this._enclosing.database.RawQuery(queryString, args);
                        if (!cursor.MoveToNext())
                        {
                            Log.W(Database.Tag, "No results for query: " + queryString);
                            return false;
                        }
                        ICollection<long> seqsToPurge = new HashSet<long>();
                        ICollection<long> seqsToKeep = new HashSet<long>();
                        ICollection<string> revsToPurge = new HashSet<string>();
                        while (!cursor.IsAfterLast())
                        {
                            string revID = cursor.GetString(0);
                            long sequence = cursor.GetLong(1);
                            long parent = cursor.GetLong(2);
                            // Purge this row if its sequence was already marked (as a
                            // purged child's ancestor) or it was explicitly requested
                            // and no surviving revision needs it.
                            if (seqsToPurge.Contains(sequence) || revIDs.Contains(revID) && !seqsToKeep.Contains(sequence))
                            {
                                seqsToPurge.AddItem(sequence);
                                revsToPurge.AddItem(revID);
                                if (parent > 0)
                                {
                                    seqsToPurge.AddItem(parent);
                                }
                            }
                            else
                            {
                                // This revision survives, so its parent must be kept too.
                                seqsToPurge.Remove(sequence);
                                revsToPurge.Remove(revID);
                                seqsToKeep.AddItem(parent);
                            }
                            cursor.MoveToNext();
                        }
                        seqsToPurge.RemoveAll(seqsToKeep);
                        // BUGFIX: the Java-style "%s" placeholders were left behind by the
                        // Sharpen conversion; string.Format never substitutes them, so the
                        // log line printed literal "%s". Use {0}-style composite formatting
                        // and join the collections so their contents are shown.
                        Log.I(Database.Tag, string.Format("Purging doc '{0}' revs ({1}); asked for ({2})", docID, TextUtils.Join(", ", revsToPurge), TextUtils.Join(", ", revIDs)));
                        if (seqsToPurge.Count > 0)
                        {
                            string seqsToPurgeList = TextUtils.Join(",", seqsToPurge);
                            // BUGFIX: "%s" here produced the literal SQL
                            // "DELETE FROM revs WHERE sequence in (%s)", which can never
                            // purge anything; {0} substitutes the joined sequence list.
                            string sql = string.Format("DELETE FROM revs WHERE sequence in ({0})", seqsToPurgeList);
                            try
                            {
                                this._enclosing.database.ExecSQL(sql);
                            }
                            catch (SQLException e)
                            {
                                Log.E(Database.Tag, "Error deleting revisions via: " + sql, e);
                                return false;
                            }
                        }
                        Sharpen.Collections.AddAll(revsPurged, revsToPurge);
                    }
                    catch (SQLException e)
                    {
                        Log.E(Database.Tag, "Error getting revisions", e);
                        return false;
                    }
                    finally
                    {
                        if (cursor != null)
                        {
                            cursor.Close();
                        }
                    }
                }
            }
        }
        result.Put(docID, revsPurged);
    }
    return true;
}
/// <summary>
/// Runs every test file that is *not* covered by the *.tests files and writes
/// a shell script ("fix-tests-files.sh") that appends the tests which now pass
/// to the appropriate *.tests file for each optimization level.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
public static void Main(string[] args)
{
    TextWriter @out = new TextWriter("fix-tests-files.sh");
    try
    {
        foreach (int optLevel in OPT_LEVELS)
        {
            FilePath testDir = GetTestDir();
            FilePath[] allTests = TestUtils.RecursiveListFiles(testDir, new _FileFilter_204());
            // Start from the full test set and drop everything already covered.
            HashSet<FilePath> notCovered = new HashSet<FilePath>(Arrays.AsList(allTests));
            FilePath[] covered = GetTestFiles(optLevel);
            notCovered.RemoveAll(Arrays.AsList(covered));
            List<string> nowPassing = new List<string>();
            int prefixLength = testDir.GetAbsolutePath().Length + 1;
            foreach (FilePath candidate in notCovered)
            {
                try
                {
                    (new MozillaSuiteTest(candidate, optLevel)).RunMozillaTest();
                    // Record the path relative to testDir, with forward slashes.
                    string relative = Sharpen.Runtime.Substring(candidate.GetAbsolutePath(), prefixLength);
                    relative = relative.Replace('\\', '/');
                    nowPassing.Add(relative);
                }
                catch
                {
                    // Still failing: leave it skipped.
                }
            }
            // "nowPassing" holds tests that are currently skipped but pass;
            // emit shell commands that append them to the *.tests file.
            if (nowPassing.Count > 0)
            {
                @out.WriteLine("cat >> " + GetTestFilename(optLevel) + " <<EOF");
                string[] ordered = Sharpen.Collections.ToArray(nowPassing, new string[0]);
                Arrays.Sort(ordered);
                foreach (string name in ordered)
                {
                    @out.WriteLine(name);
                }
                @out.WriteLine("EOF");
            }
        }
        System.Console.Out.WriteLine("Done.");
    }
    finally
    {
        @out.Close();
    }
}
/// <summary>
/// Closes the wrapped directory after verifying test invariants: throws if any
/// files or locks are still open; optionally simulates a crash, runs CheckIndex,
/// and asserts that no unreferenced files exist beyond those this wrapper tried
/// but failed to delete (tracked in OpenFilesDeleted).
/// </summary>
public override void Dispose()
{
    // NOTE(review): locking on 'this' is generally discouraged (external code can
    // lock the same instance); kept unchanged here to preserve existing behavior.
    lock (this)
    {
        // files that we tried to delete, but couldn't because readers were open.
        // all that matters is that we tried! (they will eventually go away)
        ISet<string> pendingDeletions = new HashSet<string>(OpenFilesDeleted);
        MaybeYield();
        if (OpenFiles == null)
        {
            OpenFiles = new Dictionary<string, int>();
            OpenFilesDeleted = new HashSet<string>();
        }
        if (OpenFiles.Count > 0)
        {
            // print the first one as its very verbose otherwise
            Exception cause = null;
            IEnumerator<Exception> stacktraces = OpenFileHandles.Values.GetEnumerator();
            if (stacktraces.MoveNext())
            {
                cause = stacktraces.Current;
            }
            // RuntimeException instead of System.IO.IOException because
            // super() does not throw System.IO.IOException currently:
            throw new Exception("MockDirectoryWrapper: cannot close: there are still open files: " + String.Join(" ,", OpenFiles.ToArray().Select(x => x.Key)), cause);
        }
        if (OpenLocks.Count > 0)
        {
            throw new Exception("MockDirectoryWrapper: cannot close: there are still open locks: " + String.Join(" ,", OpenLocks.ToArray()));
        }
        IsOpen = false;
        if (CheckIndexOnClose)
        {
            // Disable random fault injection so the final consistency checks run cleanly.
            RandomIOExceptionRate_Renamed = 0.0;
            RandomIOExceptionRateOnOpen_Renamed = 0.0;
            if (DirectoryReader.IndexExists(this))
            {
                if (LuceneTestCase.VERBOSE)
                {
                    Console.WriteLine("\nNOTE: MockDirectoryWrapper: now crush");
                }
                Crash();
                // corrupt any unsynced-files
                if (LuceneTestCase.VERBOSE)
                {
                    Console.WriteLine("\nNOTE: MockDirectoryWrapper: now run CheckIndex");
                }
                TestUtil.CheckIndex(this, CrossCheckTermVectorsOnClose);
                // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles
                if (AssertNoUnreferencedFilesOnClose)
                {
                    // now look for unreferenced files: discount ones that we tried to delete but could not
                    HashSet<string> allFiles = new HashSet<string>(Arrays.AsList(ListAll()));
                    allFiles.RemoveAll(pendingDeletions);
                    string[] startFiles = allFiles.ToArray(/*new string[0]*/);
                    IndexWriterConfig iwc = new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null);
                    iwc.SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
                    // Open and immediately roll back a writer so IndexFileDeleter prunes
                    // anything it considers unreferenced; compare before/after file sets.
                    (new IndexWriter(@in, iwc)).Rollback();
                    string[] endFiles = @in.ListAll();
                    ISet<string> startSet = new SortedSet<string>(Arrays.AsList(startFiles));
                    ISet<string> endSet = new SortedSet<string>(Arrays.AsList(endFiles));
                    if (pendingDeletions.Contains("segments.gen") && endSet.Contains("segments.gen"))
                    {
                        // this is possible if we hit an exception while writing segments.gen, we try to delete it
                        // and it ends out in pendingDeletions (but IFD wont remove this).
                        startSet.Add("segments.gen");
                        if (LuceneTestCase.VERBOSE)
                        {
                            Console.WriteLine("MDW: Unreferenced check: Ignoring segments.gen that we could not delete.");
                        }
                    }
                    // its possible we cannot delete the segments_N on windows if someone has it open and
                    // maybe other files too, depending on timing. normally someone on windows wouldnt have
                    // an issue (IFD would nuke this stuff eventually), but we pass NoDeletionPolicy...
                    foreach (string file in pendingDeletions)
                    {
                        if (file.StartsWith("segments") && !file.Equals("segments.gen") && endSet.Contains(file))
                        {
                            startSet.Add(file);
                            if (LuceneTestCase.VERBOSE)
                            {
                                Console.WriteLine("MDW: Unreferenced check: Ignoring segments file: " + file + " that we could not delete.");
                            }
                            SegmentInfos sis = new SegmentInfos();
                            try
                            {
                                sis.Read(@in, file);
                            }
                            catch (System.IO.IOException ioe)
                            {
                                // OK: likely some of the .si files were deleted
                            }
                            try
                            {
                                // Files referenced by this undeletable segments file are also
                                // not "unreferenced" — fold them into the start set.
                                ISet<string> ghosts = new HashSet<string>(sis.Files(@in, false));
                                foreach (string s in ghosts)
                                {
                                    if (endSet.Contains(s) && !startSet.Contains(s))
                                    {
                                        Debug.Assert(pendingDeletions.Contains(s));
                                        if (LuceneTestCase.VERBOSE)
                                        {
                                            Console.WriteLine("MDW: Unreferenced check: Ignoring referenced file: " + s + " " + "from " + file + " that we could not delete.");
                                        }
                                        startSet.Add(s);
                                    }
                                }
                            }
                            catch (Exception t)
                            {
                                // Best-effort: report and continue with the remaining pending deletions.
                                Console.Error.WriteLine("ERROR processing leftover segments file " + file + ":");
                                Console.WriteLine(t.ToString());
                                Console.Write(t.StackTrace);
                            }
                        }
                    }
                    startFiles = startSet.ToArray(/*new string[0]*/);
                    endFiles = endSet.ToArray(/*new string[0]*/);
                    if (!Arrays.Equals(startFiles, endFiles))
                    {
                        // Build a human-readable diff of the file sets for the assert message.
                        IList<string> removed = new List<string>();
                        foreach (string fileName in startFiles)
                        {
                            if (!endSet.Contains(fileName))
                            {
                                removed.Add(fileName);
                            }
                        }
                        IList<string> added = new List<string>();
                        foreach (string fileName in endFiles)
                        {
                            if (!startSet.Contains(fileName))
                            {
                                added.Add(fileName);
                            }
                        }
                        string extras;
                        if (removed.Count != 0)
                        {
                            extras = "\n\nThese files were removed: " + removed;
                        }
                        else
                        {
                            extras = "";
                        }
                        if (added.Count != 0)
                        {
                            extras += "\n\nThese files were added (waaaaaaaaaat!): " + added;
                        }
                        if (pendingDeletions.Count != 0)
                        {
                            extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions;
                        }
                        Debug.Assert(false, "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras);
                    }
                    // Sanity check: opening and closing an IndexWriter must not change
                    // the document count.
                    DirectoryReader ir1 = DirectoryReader.Open(this);
                    int numDocs1 = ir1.NumDocs;
                    ir1.Dispose();
                    (new IndexWriter(this, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null))).Dispose();
                    DirectoryReader ir2 = DirectoryReader.Open(this);
                    int numDocs2 = ir2.NumDocs;
                    ir2.Dispose();
                    Debug.Assert(numDocs1 == numDocs2, "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2);
                }
            }
        }
        @in.Dispose();
    }
}