// Verifies that a caller blocked in WaitForGeneration is not starved when an update
// stalls inside AddDocument while NRT reopens continue; the waiter must finish within 1 second.
public virtual void TestThreadStarvationNoDeleteNRTReader()
{
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
    conf.SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
    Directory d = NewDirectory();
    CountdownEvent latch = new CountdownEvent(1);
    CountdownEvent signal = new CountdownEvent(1);

    LatchedIndexWriter _writer = new LatchedIndexWriter(d, conf, latch, signal);
    TrackingIndexWriter writer = new TrackingIndexWriter(_writer);
    SearcherManager manager = new SearcherManager(_writer, false, null);
    Document doc = new Document();
    doc.Add(NewTextField("test", "test", Field.Store.YES));
    writer.AddDocument(doc);
    manager.MaybeRefresh();
    var t = new ThreadAnonymousClass(this, latch, signal, writer, manager);
    t.Start();
    _writer.waitAfterUpdate = true; // wait in addDocument to let some reopens go through

    // Once this returns, the doc is already reflected in the last reopen
    long lastGen = writer.UpdateDocument(new Term("foo", "bar"), doc);

    assertFalse(manager.IsSearcherCurrent()); // false since there is a delete in the queue

    IndexSearcher searcher = manager.Acquire();
    try
    {
        assertEquals(2, searcher.IndexReader.NumDocs);
    }
    finally
    {
        manager.Release(searcher);
    }

    ControlledRealTimeReopenThread<IndexSearcher> thread = new ControlledRealTimeReopenThread<IndexSearcher>(writer, manager, 0.01, 0.01);
    thread.Start(); // start reopening
    if (Verbose)
    {
        Console.WriteLine("waiting now for generation " + lastGen);
    }

    AtomicBoolean finished = new AtomicBoolean(false);
    var waiter = new ThreadAnonymousClass2(this, lastGen, thread, finished);
    waiter.Start();
    manager.MaybeRefresh();
    waiter.Join(1000);
    if (!finished)
    {
        waiter.Interrupt();
        fail("thread deadlocked on waitForGeneration");
    }
    thread.Dispose();
    thread.Join();
    IOUtils.Dispose(manager, _writer, d);
}
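// Indexes random numeric, binary, and sorted doc values, force-merges to a single
// segment, then spawns several threads that read the values back concurrently
// against the expected in-memory lists.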
public virtual void Test()
{
    Directory dir = NewDirectory();
    IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));
    IList<long?> numbers = new List<long?>();
    IList<BytesRef> binary = new List<BytesRef>();
    IList<BytesRef> sorted = new List<BytesRef>();
    int numDocs = AtLeast(100);
    for (int i = 0; i < numDocs; i++)
    {
        Document d = new Document();
        long number = Random.NextInt64();
        d.Add(new NumericDocValuesField("number", number));
        BytesRef bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random));
        d.Add(new BinaryDocValuesField("bytes", bytes));
        binary.Add(bytes);
        bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random));
        d.Add(new SortedDocValuesField("sorted", bytes));
        sorted.Add(bytes);
        w.AddDocument(d);
        numbers.Add(number);
    }

    w.ForceMerge(1);
    IndexReader r = w.GetReader();
    w.Dispose();

    Assert.AreEqual(1, r.Leaves.Count);
    AtomicReader ar = (AtomicReader)r.Leaves[0].Reader;

    int numThreads = TestUtil.NextInt32(Random, 2, 5);
    IList<ThreadJob> threads = new List<ThreadJob>();
    CountdownEvent startingGun = new CountdownEvent(1);
    for (int t = 0; t < numThreads; t++)
    {
        Random threadRandom = new Random(Random.Next());
        ThreadJob thread = new ThreadAnonymousClass(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
        thread.Start();
        threads.Add(thread);
    }

    startingGun.Signal();

    foreach (ThreadJob thread in threads)
    {
        thread.Join();
    }

    r.Dispose();
    dir.Dispose();
}
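// Runs the search threads while a background thread keeps reopening the reader
// until stopTime is reached.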
protected override void DoSearching(TaskScheduler es, long stopTime)
{
    ThreadJob reopenThread = new ThreadAnonymousClass(this, stopTime);
    reopenThread.IsBackground = true;
    reopenThread.Start();

    RunSearchThreads(stopTime);

    reopenThread.Join();
}
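// While an indexing thread runs, repeatedly backs up the index via the
// SnapshotDeletionPolicy, then verifies the writer leaves no unreferenced files.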
private void RunTest(Random random, Directory dir)
{
    // Run for ~1 second
    long stopTime = Environment.TickCount + 1000;

    SnapshotDeletionPolicy dp = DeletionPolicy;
    IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetIndexDeletionPolicy(dp).SetMaxBufferedDocs(2));

    // Verify we catch misuse:
    try
    {
        dp.Snapshot();
        Assert.Fail("did not hit exception");
    }
#pragma warning disable 168
    catch (InvalidOperationException ise)
#pragma warning restore 168
    {
        // expected
    }
    dp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
    writer.Commit();

    ThreadJob t = new ThreadAnonymousClass(stopTime, writer, NewField);
    t.Start();

    // While the above indexing thread is running, take many
    // backups:
    do
    {
        BackupIndex(dir, dp);
        Thread.Sleep(20);
    } while (t.IsAlive);

    t.Join();

    // Add one more document to force writer to commit a
    // final segment, so deletion policy has a chance to
    // delete again:
    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.StoreTermVectors = true;
    customType.StoreTermVectorPositions = true;
    customType.StoreTermVectorOffsets = true;
    doc.Add(NewField("content", "aaa", customType));
    writer.AddDocument(doc);

    // Make sure we don't have any leftover files in the
    // directory:
    writer.Dispose();
    TestIndexWriter.AssertNoUnreferencedFiles(dir, "some files were not deleted but should have been");
}
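// Stress-tests LiveFieldValues: several threads add, delete, and reopen with
// random probabilities; after all threads finish and a final refresh, no live
// values may remain.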
public virtual void Test()
{
    Directory dir = NewFSDirectory(CreateTempDir("livefieldupdates"));
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));

    IndexWriter w = new IndexWriter(dir, iwc);

    SearcherManager mgr = new SearcherManager(w, true, new SearcherFactoryAnonymousClass());

    const int missing = -1;

    LiveFieldValues<IndexSearcher, int?> rt = new LiveFieldValuesAnonymousClass(mgr, missing);

    int numThreads = TestUtil.NextInt32(Random, 2, 5);
    if (Verbose)
    {
        Console.WriteLine(numThreads + " threads");
    }

    CountdownEvent startingGun = new CountdownEvent(1);

    IList<ThreadJob> threads = new JCG.List<ThreadJob>();

    int iters = AtLeast(1000);
    int idCount = TestUtil.NextInt32(Random, 100, 10000);

    double reopenChance = Random.NextDouble() * 0.01;
    double deleteChance = Random.NextDouble() * 0.25;
    double addChance = Random.NextDouble() * 0.5;

    for (int t = 0; t < numThreads; t++)
    {
        int threadID = t;
        Random threadRandom = new Random(Random.Next());
        ThreadJob thread = new ThreadAnonymousClass(w, mgr, missing, rt, startingGun, iters, idCount, reopenChance, deleteChance, addChance, t, threadID, threadRandom);
        threads.Add(thread);
        thread.Start();
    }

    startingGun.Signal();

    foreach (ThreadJob thread in threads)
    {
        thread.Join();
    }
    mgr.MaybeRefresh();
    Assert.AreEqual(0, rt.Count);

    rt.Dispose();
    mgr.Dispose();
    w.Dispose();
    dir.Dispose();
}
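// Intentionally leaks a running thread past the end of the test to exercise
// the test framework's thread-leak handling.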
public virtual void TestThreadLeak()
{
    ThreadJob t = new ThreadAnonymousClass(this);
    t.Start();

    while (!t.IsAlive)
    {
        Thread.Yield();
    }

    // once alive, leave it to run outside of the test scope.
}
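// Calls ForceMerge(1) while a background thread keeps adding documents; the
// writer must not be provoked into running more than one merge.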
public virtual void Test()
{
    Directory d = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random);
    analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);
    MyIndexWriter w = new MyIndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

    // Try to make an index that requires merging:
    w.Config.SetMaxBufferedDocs(TestUtil.NextInt32(Random, 2, 11));
    int numStartDocs = AtLeast(20);
    LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues);
    for (int docIDX = 0; docIDX < numStartDocs; docIDX++)
    {
        w.AddDocument(docs.NextDoc());
    }
    MergePolicy mp = w.Config.MergePolicy;
    int mergeAtOnce = 1 + w.segmentInfos.Count;
    if (mp is TieredMergePolicy)
    {
        ((TieredMergePolicy)mp).MaxMergeAtOnce = mergeAtOnce;
    }
    else if (mp is LogMergePolicy)
    {
        ((LogMergePolicy)mp).MergeFactor = mergeAtOnce;
    }
    else
    {
        // skip test
        w.Dispose();
        d.Dispose();
        return;
    }

    AtomicBoolean doStop = new AtomicBoolean();
    w.Config.SetMaxBufferedDocs(2);
    ThreadJob t = new ThreadAnonymousClass(this, w, numStartDocs, docs, doStop);
    t.Start();
    w.ForceMerge(1);
    doStop.Value = true;
    t.Join();
    Assert.IsTrue(w.mergeCount <= 1, "merge count is " + w.mergeCount);
    w.Dispose();
    d.Dispose();
    docs.Dispose();
}
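// Holds the queue's global buffer lock (obtained via reflection) while another
// thread buffers a delete, leaving the global slice only partially applied;
// freezing the global buffer must still capture the change.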
public virtual void TestPartiallyAppliedGlobalSlice()
{
    DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
    System.Reflection.FieldInfo field = typeof(DocumentsWriterDeleteQueue).GetField("globalBufferLock", BindingFlags.NonPublic | BindingFlags.GetField | BindingFlags.Instance);
    ReentrantLock @lock = (ReentrantLock)field.GetValue(queue);
    @lock.Lock();
    var t = new ThreadAnonymousClass(this, queue);
    t.Start();
    t.Join();
    @lock.Unlock();
    Assert.IsTrue(queue.AnyChanges(), "changes in del queue but not in slice yet");
    queue.TryApplyGlobalSlice();
    Assert.IsTrue(queue.AnyChanges(), "changes in global buffer");
    FrozenBufferedUpdates freezeGlobalBuffer = queue.FreezeGlobalBuffer(null);
    Assert.IsTrue(freezeGlobalBuffer.Any());
    Assert.AreEqual(1, freezeGlobalBuffer.termCount);
    Assert.IsFalse(queue.AnyChanges(), "all changes applied");
}
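// Computes the hash codes of many random AutomatonQuerys from several
// concurrent threads to surface thread-safety problems.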
public virtual void TestHashCodeWithThreads()
{
    AutomatonQuery[] queries = new AutomatonQuery[1000];
    for (int i = 0; i < queries.Length; i++)
    {
        queries[i] = new AutomatonQuery(new Term("bogus", "bogus"), AutomatonTestUtil.RandomAutomaton(Random));
    }
    CountdownEvent startingGun = new CountdownEvent(1);
    int numThreads = TestUtil.NextInt32(Random, 2, 5);
    ThreadJob[] threads = new ThreadJob[numThreads];
    for (int threadID = 0; threadID < numThreads; threadID++)
    {
        ThreadJob thread = new ThreadAnonymousClass(this, queries, startingGun);
        threads[threadID] = thread;
        thread.Start();
    }
    startingGun.Signal();
    foreach (ThreadJob thread in threads)
    {
        thread.Join();
    }
}
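// Records expected TopDocs for roughly 10 random terms, then replays the same
// term searches from several threads sharing one IndexSearcher.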
public virtual void Test()
{
    Directory dir = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random);
    analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);
    RandomIndexWriter w = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, dir, analyzer);
    LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues);
    int charsToIndex = AtLeast(100000);
    int charsIndexed = 0;
    //System.out.println("bytesToIndex=" + charsToIndex);
    while (charsIndexed < charsToIndex)
    {
        Document doc = docs.NextDoc();
        charsIndexed += doc.Get("body").Length;
        w.AddDocument(doc);
        //System.out.println("  bytes=" + charsIndexed + " add: " + doc);
    }
    IndexReader r = w.GetReader();
    //System.out.println("numDocs=" + r.NumDocs);
    w.Dispose();

    IndexSearcher s = NewSearcher(r);
    Terms terms = MultiFields.GetFields(r).GetTerms("body");
    int termCount = 0;
    TermsEnum termsEnum = terms.GetEnumerator();
    while (termsEnum.MoveNext())
    {
        termCount++;
    }
    Assert.IsTrue(termCount > 0);

    // Target ~10 terms to search:
    double chance = 10.0 / termCount;
    termsEnum = terms.GetEnumerator(termsEnum);
    IDictionary<BytesRef, TopDocs> answers = new Dictionary<BytesRef, TopDocs>();
    while (termsEnum.MoveNext())
    {
        if (Random.NextDouble() <= chance)
        {
            BytesRef term = BytesRef.DeepCopyOf(termsEnum.Term);
            answers[term] = s.Search(new TermQuery(new Term("body", term)), 100);
        }
    }

    if (answers.Count > 0)
    {
        CountdownEvent startingGun = new CountdownEvent(1);
        int numThreads = TestUtil.NextInt32(Random, 2, 5);
        ThreadJob[] threads = new ThreadJob[numThreads];
        for (int threadID = 0; threadID < numThreads; threadID++)
        {
            ThreadJob thread = new ThreadAnonymousClass(this, s, answers, startingGun);
            threads[threadID] = thread;
            thread.Start();
        }
        startingGun.Signal();
        foreach (ThreadJob thread in threads)
        {
            thread.Join();
        }
    }
    r.Dispose();
    dir.Dispose();
}
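// Stresses IndexWriter.Dispose(false): in-flight merges are aborted while a
// concurrent thread is still adding documents, and the index must remain
// readable afterwards.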
public virtual void TestNoWaitClose()
{
    Directory directory = NewDirectory();
    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.IsTokenized = false;

    Field idField = NewField("id", "", customType);
    doc.Add(idField);

    for (int pass = 0; pass < 2; pass++)
    {
        if (Verbose)
        {
            Console.WriteLine("TEST: pass=" + pass);
        }

        // NOTE: the writer setup below was redacted in the source; it is
        // reconstructed here from the upstream Lucene testNoWaitClose.
        IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy());
        IndexWriter writer = new IndexWriter(directory, conf);
        ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 100;

        for (int iter = 0; iter < 10; iter++)
        {
            if (Verbose)
            {
                Console.WriteLine("TEST: iter=" + iter);
            }
            for (int j = 0; j < 199; j++)
            {
                idField.SetStringValue(Convert.ToString(iter * 201 + j));
                writer.AddDocument(doc);
            }

            int delID = iter * 199;
            for (int j = 0; j < 20; j++)
            {
                writer.DeleteDocuments(new Term("id", Convert.ToString(delID)));
                delID += 5;
            }

            // Force a bunch of merge threads to kick off so we
            // stress out aborting them on close:
            ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;

            IndexWriter finalWriter = writer;
            IList<Exception> failure = new JCG.List<Exception>();
            ThreadJob t1 = new ThreadAnonymousClass(this, doc, finalWriter, failure);

            if (failure.Count > 0)
            {
                ExceptionDispatchInfo.Capture(failure[0]).Throw(); // LUCENENET: Rethrow to preserve stack details from the original throw
            }

            t1.Start();

            writer.Dispose(false);
            t1.Join();

            // Make sure reader can read
            IndexReader reader = DirectoryReader.Open(directory);
            reader.Dispose();

            // Reopen
            writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
        }
        writer.Dispose();
    }

    directory.Dispose();
}
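// Near-real-time faceting: an indexer thread adds faceted documents and a
// reopener thread refreshes the SearcherTaxonomyManager while the main thread
// searches and sanity-checks the top facet children.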
public virtual void TestNrt()
{
    Store.Directory dir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
    // Don't allow tiny maxBufferedDocs; it can make this
    // test too slow:
    iwc.SetMaxBufferedDocs(Math.Max(500, iwc.MaxBufferedDocs));

    // MockRandom/AlcoholicMergePolicy are too slow:
    TieredMergePolicy tmp = new TieredMergePolicy();
    tmp.FloorSegmentMB = .001;
    iwc.SetMergePolicy(tmp);

    IndexWriter w = new IndexWriter(dir, iwc);

    var tw = new DirectoryTaxonomyWriter(taxoDir);
    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("field", true);
    AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    //int ordLimit = TestNightly ? 100000 : 6000;
    // LUCENENET specific: 100000 facets takes about 2-3 hours. To keep it under
    // the 1 hour free limit of Azure DevOps, this was reduced to 30000.
    int ordLimit = TestNightly ? 30000 : 6000;

    var indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);

    var mgr = new SearcherTaxonomyManager(w, true, null, tw);

    var reopener = new ThreadAnonymousClass(stop, mgr);

    reopener.Name = "reopener";
    reopener.Start();

    indexer.Name = "indexer";
    indexer.Start();

    try
    {
        while (!stop)
        {
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.Searcher.Search(new MatchAllDocsQuery(), sfc);
                Facets facets = GetTaxonomyFacetCounts(pair.TaxonomyReader, config, sfc);
                FacetResult result = facets.GetTopChildren(10, "field");
                if (pair.Searcher.IndexReader.NumDocs > 0)
                {
                    //System.out.println(pair.taxonomyReader.getSize());
                    Assert.IsTrue(result.ChildCount > 0);
                    Assert.IsTrue(result.LabelValues.Length > 0);
                }

                //if (VERBOSE) {
                //    System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                //}
            }
            finally
            {
                mgr.Release(pair);
            }
        }
    }
    finally
    {
        indexer.Join();
        reopener.Join();
    }

    if (Verbose)
    {
        Console.WriteLine("TEST: now stop");
    }

    IOUtils.Dispose(mgr, tw, w, taxoDir, dir);
}