/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] threads = new TimedThread[4];

    IndexWriter writer = new MockIndexWriter(this, directory, true, ANALYZER, true);
    writer.SetMaxBufferedDocs(7);
    writer.SetMergeFactor(3);

    // Establish a base index of 100 docs:
    for (int i = 0; i < 100; i++)
    {
        Document d = new Document();
        d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
        d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(d);
    }
    writer.Commit();

    IndexReader r = IndexReader.Open(directory);
    Assert.AreEqual(100, r.NumDocs());
    r.Close();

    IndexerThread indexerThread = new IndexerThread(writer, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    writer.Close();

    Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
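All of these stress tests hang their worker threads off a shared TimedThread base class whose definition is not shown in this excerpt. Below is a minimal sketch of that harness, modeled on the upstream Lucene tests; the run-time constant and the AnyErrors helper name are assumptions. Each subclass overrides DoWork(), and the loop stops once the wall-clock budget expires or any sibling thread has failed.

// Minimal sketch of the shared stress-test harness (assumed, modeled on the
// upstream Lucene tests; RUN_TIME_SEC and AnyErrors are assumed names):
abstract class TimedThread : SupportClass.ThreadClass
{
    public bool failed;                 // set when DoWork() throws
    public int count;                   // completed DoWork() iterations
    private const int RUN_TIME_SEC = 6; // wall-clock budget (assumed value)
    private readonly TimedThread[] allThreads;

    protected TimedThread(TimedThread[] threads)
    {
        this.allThreads = threads;
    }

    public abstract void DoWork();

    public override void Run()
    {
        System.DateTime stopTime = System.DateTime.UtcNow.AddSeconds(RUN_TIME_SEC);
        count = 0;
        try
        {
            while (System.DateTime.UtcNow < stopTime && !AnyErrors())
            {
                DoWork();
                count++;
            }
        }
        catch (System.Exception e)
        {
            System.Console.WriteLine(e);
            failed = true; // surfaced by the Assert.IsTrue(!...failed) checks above
        }
    }

    private bool AnyErrors()
    {
        // Stop early if any sibling thread already hit an exception:
        for (int i = 0; i < allThreads.Length; i++)
        {
            if (allThreads[i] != null && allThreads[i].failed)
            {
                return true;
            }
        }
        return false;
    }
}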
public static void Main(System.String[] args)
{
    bool readOnly = false;
    bool add = false;

    for (int i = 0; i < args.Length; i++)
    {
        if ("-ro".Equals(args[i]))
        {
            readOnly = true;
        }
        if ("-add".Equals(args[i]))
        {
            add = true;
        }
    }

    // Make sure the "index" directory exists:
    System.IO.FileInfo indexDir = new System.IO.FileInfo("index");
    if (!System.IO.File.Exists(indexDir.FullName) && !System.IO.Directory.Exists(indexDir.FullName))
    {
        System.IO.Directory.CreateDirectory(indexDir.FullName);
    }

    IndexReader.Unlock(FSDirectory.GetDirectory(indexDir, false));

    if (!readOnly)
    {
        IndexWriter writer = new IndexWriter(indexDir, ANALYZER, !add);

        SupportClass.ThreadClass indexerThread = new IndexerThread(writer);
        indexerThread.Start();

        // TimeSpan ticks are 100 ns, so 10000 * 1000 ticks = 1 second:
        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
    }

    SearcherThread searcherThread1 = new SearcherThread(false);
    searcherThread1.Start();

    SEARCHER = new IndexSearcher(indexDir.ToString());

    SearcherThread searcherThread2 = new SearcherThread(true);
    searcherThread2.Start();

    SearcherThread searcherThread3 = new SearcherThread(true);
    searcherThread3.Start();
}
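For reference, a sketch of how this entry point is meant to be driven; the executable name is an assumption, and only the -ro and -add flags are defined above.

// Hypothetical invocations (the binary name "ThreadSafetyTest" is assumed):
//   ThreadSafetyTest         -- create/overwrite the "index" dir, then index and search concurrently
//   ThreadSafetyTest -add    -- append to the existing index instead of recreating it
//   ThreadSafetyTest -ro     -- read-only: skip the indexer thread, run only the searcher threads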
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] threads = new TimedThread[4];

    IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(7);
    ((TieredMergePolicy)conf.MergePolicy).MaxMergeAtOnce = 3;
    IndexWriter writer = RandomIndexWriter.MockIndexWriter(directory, conf, Random());

    // Establish a base index of 100 docs:
    for (int i = 0; i < 100; i++)
    {
        Documents.Document d = new Documents.Document();
        d.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES));
        d.Add(NewTextField("contents", English.IntToEnglish(i), Field.Store.NO));
        if ((i - 1) % 7 == 0)
        {
            writer.Commit();
        }
        writer.AddDocument(d);
    }
    writer.Commit();

    IndexReader r = DirectoryReader.Open(directory);
    Assert.AreEqual(100, r.NumDocs);
    r.Dispose();

    IndexerThread indexerThread = new IndexerThread(writer, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    writer.Dispose();

    Assert.IsTrue(!indexerThread.Failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.Failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.Failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.Failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
public virtual void TestTransactions_Mem(
    [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")] Func<IConcurrentMergeScheduler> newScheduler1,
    [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")] Func<IConcurrentMergeScheduler> newScheduler2)
{
    Console.WriteLine("Start test");

    // We can't use a non-RAM directory on Windows, because this test needs to double-write.
    MockDirectoryWrapper dir1 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    MockDirectoryWrapper dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    dir1.PreventDoubleWrite = false;
    dir2.PreventDoubleWrite = false;
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));
    dir1.FailOnOpenInput = false;
    dir2.FailOnOpenInput = false;

    // We throw exceptions in deleteFile, which creates
    // leftover files:
    dir1.AssertNoUnrefencedFilesOnClose = false;
    dir2.AssertNoUnrefencedFilesOnClose = false;

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] threads = new TimedThread[3];
    int numThread = 0;

    IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, newScheduler1, newScheduler2, threads);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!threads[i].Failed);
    }

    dir1.Dispose();
    dir2.Dispose();

    Console.WriteLine("End test");
}
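Underneath, the IndexerThread keeps the two indexes in lockstep using IndexWriter's two-phase commit. A sketch of that pattern, following the upstream Java test; writer1 and writer2 stand for the writers opened on dir1 and dir2 and are not names from this excerpt:

// Two-phase commit across both indexes (writer1/writer2 are assumed
// placeholders for the writers on dir1 and dir2):
try
{
    writer1.PrepareCommit(); // phase 1 on the first index
}
catch (System.Exception)
{
    // Phase 1 failed: neither index moves forward.
    writer1.Rollback();
    writer2.Rollback();
    return;
}
try
{
    writer2.PrepareCommit(); // phase 1 on the second index
}
catch (System.Exception)
{
    writer1.Rollback();
    writer2.Rollback();
    return;
}
// Both prepares succeeded, so the second phase is expected not to fail:
writer1.Commit();
writer2.Commit();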
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunStressTest(Directory directory, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED, null);
    modifier.SetMaxBufferedDocs(10);

    TimedThread[] threads = new TimedThread[4];
    int numThread = 0;

    if (mergeScheduler != null)
    {
        modifier.SetMergeScheduler(mergeScheduler, null);
    }

    // Two modifiers that each write 10 docs and then remove 5, over
    // and over:
    IndexerThread indexerThread = new IndexerThread(this, modifier, threads);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(this, modifier, threads);
    threads[numThread++] = indexerThread2;
    indexerThread2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    modifier.Close();

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!((TimedThread)threads[i]).failed);
    }

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunStressTest(Directory directory, IConcurrentMergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
            .SetOpenMode(OpenMode.CREATE)
            .SetMaxBufferedDocs(10)
            .SetMergeScheduler(mergeScheduler));
    modifier.Commit();

    TimedThread[] threads = new TimedThread[4];
    int numThread = 0;

    // Two modifiers that each write 10 docs and then remove 5, over
    // and over:
    IndexerThread indexerThread = new IndexerThread(modifier, threads, NewStringField, NewTextField);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(modifier, threads, NewStringField, NewTextField);
    threads[numThread++] = indexerThread2;
    indexerThread2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread searcherThread1 = new SearcherThread(directory, threads, this);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads, this);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    modifier.Dispose();

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!threads[i].failed);
    }

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
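What each IndexerThread does per iteration, sketched after the upstream Lucene test; nextId is an assumed per-thread counter, and the NewStringField/NewTextField calls correspond to the delegates passed in above:

// One iteration of the indexer's work loop: add 10 docs, then delete 5
// of them by id (modeled on the upstream test; nextId is an assumed field).
public override void DoWork()
{
    // Add 10 docs:
    for (int j = 0; j < 10; j++)
    {
        Document d = new Document();
        d.Add(NewStringField("id", nextId.ToString(), Field.Store.YES));
        d.Add(NewTextField("contents", English.IntToEnglish(Random.Next()), Field.Store.NO));
        nextId++;
        writer.AddDocument(d);
    }

    // Delete 5 docs, newest first, skipping every other id:
    int deleteId = nextId - 1;
    for (int j = 0; j < 5; j++)
    {
        writer.DeleteDocuments(new Term("id", deleteId.ToString()));
        deleteId -= 2;
    }
}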
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunStressTest(Directory directory, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
            .SetOpenMode(OpenMode_e.CREATE)
            .SetMaxBufferedDocs(10)
            .SetMergeScheduler(mergeScheduler));
    modifier.Commit();

    TimedThread[] threads = new TimedThread[4];
    int numThread = 0;

    // Two modifiers that each write 10 docs and then remove 5, over
    // and over:
    IndexerThread indexerThread = new IndexerThread(modifier, threads);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(modifier, threads);
    threads[numThread++] = indexerThread2;
    indexerThread2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    modifier.Dispose();

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!threads[i].Failed);
    }

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
public virtual void TestTransactions_Rename()
{
    RANDOM = NewRandom();

    MockRAMDirectory dir1 = new MockRAMDirectory();
    MockRAMDirectory dir2 = new MockRAMDirectory();
    dir1.SetPreventDoubleWrite(false);
    dir2.SetPreventDoubleWrite(false);
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] threads = new TimedThread[3];
    int numThread = 0;

    IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, threads);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!((TimedThread)threads[i]).failed);
    }
}
public virtual void TestTransactions_Rename()
{
    RANDOM = NewRandom();

    MockRAMDirectory dir1 = new MockRAMDirectory();
    MockRAMDirectory dir2 = new MockRAMDirectory();
    dir1.SetPreventDoubleWrite(false);
    dir2.SetPreventDoubleWrite(false);
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] threads = new TimedThread[3];
    int numThread = 0;

    IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, threads);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!((TimedThread)threads[i]).failed);
    }
}
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] threads = new TimedThread[4];

    IndexWriter writer = new MockIndexWriter(this, directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.SetMaxBufferedDocs(7);
    writer.MergeFactor = 3;

    // Establish a base index of 100 docs:
    for (int i = 0; i < 100; i++)
    {
        Document d = new Document();
        d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
        d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
        if ((i - 1) % 7 == 0)
        {
            writer.Commit();
        }
        writer.AddDocument(d);
    }
    writer.Commit();

    IndexReader r = IndexReader.Open(directory, true);
    Assert.AreEqual(100, r.NumDocs());
    r.Close();

    IndexerThread indexerThread = new IndexerThread(writer, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    writer.Close();

    Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunStressTest(Directory directory, bool autoCommit, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
    modifier.SetMaxBufferedDocs(10);

    TimedThread[] threads = new TimedThread[4];
    int numThread = 0;

    if (mergeScheduler != null)
    {
        modifier.SetMergeScheduler(mergeScheduler);
    }

    // Two modifiers that each write 10 docs and then remove 5, over
    // and over:
    IndexerThread indexerThread = new IndexerThread(this, modifier, threads);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(this, modifier, threads);
    threads[numThread++] = indexerThread2;
    indexerThread2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    modifier.Close();

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!((TimedThread)threads[i]).failed);
    }

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
public virtual void TestTransactions_Mem()
{
    Console.WriteLine("Start test");

    // We can't use a non-RAM directory on Windows, because this test needs to double-write.
    MockDirectoryWrapper dir1 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    MockDirectoryWrapper dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    dir1.PreventDoubleWrite = false;
    dir2.PreventDoubleWrite = false;
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));
    dir1.FailOnOpenInput = false;
    dir2.FailOnOpenInput = false;

    // We throw exceptions in deleteFile, which creates
    // leftover files:
    dir1.AssertNoUnrefencedFilesOnClose = false;
    dir2.AssertNoUnrefencedFilesOnClose = false;

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] threads = new TimedThread[3];
    int numThread = 0;

    IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, threads);
    threads[numThread++] = indexerThread;
    indexerThread.Start();

    SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.Start();

    for (int i = 0; i < numThread; i++)
    {
        threads[i].Join();
    }

    for (int i = 0; i < numThread; i++)
    {
        Assert.IsTrue(!threads[i].Failed);
    }

    dir1.Dispose();
    dir2.Dispose();

    Console.WriteLine("End test");
}
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] threads = new TimedThread[4];

    IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(7);
    ((TieredMergePolicy)conf.MergePolicy).MaxMergeAtOnce = 3;
    IndexWriter writer = RandomIndexWriter.MockIndexWriter(directory, conf, Random());

    // Establish a base index of 100 docs:
    for (int i = 0; i < 100; i++)
    {
        Documents.Document d = new Documents.Document();
        d.Add(NewStringField("id", Convert.ToString(i), Field.Store.YES));
        d.Add(NewTextField("contents", English.IntToEnglish(i), Field.Store.NO));
        if ((i - 1) % 7 == 0)
        {
            writer.Commit();
        }
        writer.AddDocument(d);
    }
    writer.Commit();

    IndexReader r = DirectoryReader.Open(directory);
    Assert.AreEqual(100, r.NumDocs);
    r.Dispose();

    IndexerThread indexerThread = new IndexerThread(writer, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    writer.Dispose();

    Assert.IsTrue(!indexerThread.Failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.Failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.Failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.Failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
public static void Main(System.String[] args)
{
    bool readOnly = false;
    bool add = false;

    for (int i = 0; i < args.Length; i++)
    {
        if ("-ro".Equals(args[i]))
        {
            readOnly = true;
        }
        if ("-add".Equals(args[i]))
        {
            add = true;
        }
    }

    // Make sure the "index" directory exists:
    System.IO.FileInfo indexDir = new System.IO.FileInfo("index");
    if (!System.IO.File.Exists(indexDir.FullName) && !System.IO.Directory.Exists(indexDir.FullName))
    {
        System.IO.Directory.CreateDirectory(indexDir.FullName);
    }

    IndexReader.Unlock(FSDirectory.GetDirectory(indexDir, false));

    if (!readOnly)
    {
        IndexWriter writer = new IndexWriter(indexDir, ANALYZER, !add);

        SupportClass.ThreadClass indexerThread = new IndexerThread(writer);
        indexerThread.Start();

        // TimeSpan ticks are 100 ns, so 10000 * 1000 ticks = 1 second:
        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
    }

    SearcherThread searcherThread1 = new SearcherThread(false);
    searcherThread1.Start();

    SEARCHER = new IndexSearcher(indexDir.ToString());

    SearcherThread searcherThread2 = new SearcherThread(true);
    searcherThread2.Start();

    SearcherThread searcherThread3 = new SearcherThread(true);
    searcherThread3.Start();
}
public virtual void Test_Directory() // LUCENENET specific - name collides with property of LuceneTestCase
{
    Store.Directory indexDir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriter w = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
    var tw = new DirectoryTaxonomyWriter(taxoDir);

    // first empty commit
    w.Commit();
    tw.Commit();

    var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);

    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("field", true);

    AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    //int ordLimit = TestNightly ? 100000 : 6000;
    // LUCENENET specific: 100000 facets takes about 2-3 hours. To keep it under
    // the 1 hour free limit of Azure DevOps, this was reduced to 30000.
    int ordLimit = TestNightly ? 30000 : 6000;

    var indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
    indexer.Start();

    try
    {
        while (!stop)
        {
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.Searcher.Search(new MatchAllDocsQuery(), sfc);
                Facets facets = GetTaxonomyFacetCounts(pair.TaxonomyReader, config, sfc);
                FacetResult result = facets.GetTopChildren(10, "field");
                if (pair.Searcher.IndexReader.NumDocs > 0)
                {
                    //System.out.println(pair.taxonomyReader.getSize());
                    Assert.IsTrue(result.ChildCount > 0);
                    Assert.IsTrue(result.LabelValues.Length > 0);
                }

                //if (VERBOSE) {
                //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                //}
            }
            finally
            {
                mgr.Release(pair);
            }
        }
    }
    finally
    {
        indexer.Join();
    }

    if (Verbose)
    {
        Console.WriteLine("TEST: now stop");
    }

    IOUtils.Dispose(mgr, tw, w, taxoDir, indexDir);
}
public virtual void TestNrt()
{
    Store.Directory dir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

    // Don't allow tiny maxBufferedDocs; it can make this
    // test too slow:
    iwc.SetMaxBufferedDocs(Math.Max(500, iwc.MaxBufferedDocs));

    // MockRandom/AlcoholicMergePolicy are too slow:
    TieredMergePolicy tmp = new TieredMergePolicy();
    tmp.FloorSegmentMB = .001;
    iwc.SetMergePolicy(tmp);

    IndexWriter w = new IndexWriter(dir, iwc);
    var tw = new DirectoryTaxonomyWriter(taxoDir);

    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("field", true);

    AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

    var indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);

    var mgr = new SearcherTaxonomyManager(w, true, null, tw);

    var reopener = new ThreadAnonymousInnerClassHelper(this, stop, mgr);
    reopener.Name = "reopener";
    reopener.Start();

    indexer.Name = "indexer";
    indexer.Start();

    try
    {
        while (!stop.Get())
        {
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.searcher.Search(new MatchAllDocsQuery(), sfc);
                Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                FacetResult result = facets.GetTopChildren(10, "field");
                if (pair.searcher.IndexReader.NumDocs > 0)
                {
                    //System.out.println(pair.taxonomyReader.getSize());
                    Assert.True(result.ChildCount > 0);
                    Assert.True(result.LabelValues.Length > 0);
                }

                //if (VERBOSE) {
                //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                //}
            }
            finally
            {
                mgr.Release(pair);
            }
        }
    }
    finally
    {
        indexer.Join();
        reopener.Join();
    }

    if (VERBOSE)
    {
        Console.WriteLine("TEST: now stop");
    }

    IOUtils.Close(mgr, tw, w, taxoDir, dir);
}
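The reopener thread created above (ThreadAnonymousInnerClassHelper) drives the near-real-time refreshes while the indexer runs. A sketch of its loop, modeled on the upstream Java test; the 20 msec sleep bound is the upstream value, assumed here:

// Sketch of the reopener's Run(): keep refreshing the manager until the
// indexer sets stop, so searchers see near-real-time updates.
public override void Run()
{
    while (!stop.Get())
    {
        try
        {
            // Sleep up to 20 msec, then publish a fresh searcher/taxonomy pair:
            Thread.Sleep(Random().Next(20));
            mgr.MaybeRefresh();
        }
        catch (Exception e)
        {
            throw new Exception("reopen failed", e);
        }
    }
}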
public virtual void TestDirectory()
{
    Store.Directory indexDir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriter w = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    var tw = new DirectoryTaxonomyWriter(taxoDir);

    // first empty commit
    w.Commit();
    tw.Commit();

    var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);

    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("field", true);

    AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

    var indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
    indexer.Start();

    try
    {
        while (!stop.Get())
        {
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.searcher.Search(new MatchAllDocsQuery(), sfc);
                Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                FacetResult result = facets.GetTopChildren(10, "field");
                if (pair.searcher.IndexReader.NumDocs > 0)
                {
                    //System.out.println(pair.taxonomyReader.getSize());
                    Assert.True(result.ChildCount > 0);
                    Assert.True(result.LabelValues.Length > 0);
                }

                //if (VERBOSE) {
                //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                //}
            }
            finally
            {
                mgr.Release(pair);
            }
        }
    }
    finally
    {
        indexer.Join();
    }

    if (VERBOSE)
    {
        Console.WriteLine("TEST: now stop");
    }

    IOUtils.Close(mgr, tw, w, taxoDir, indexDir);
}
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunStressTest(Directory directory, bool autoCommit, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
    modifier.SetMaxBufferedDocs(10);

    TimedThread[] threads = new TimedThread[4];

    if (mergeScheduler != null)
    {
        modifier.SetMergeScheduler(mergeScheduler);
    }

    // Two modifiers that each write 10 docs and then remove 5, over
    // and over:
    IndexerThread indexerThread = new IndexerThread(modifier, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(modifier, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    modifier.Close();

    Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
/*
 * Run two indexer and two searcher threads against a single index as
 * a stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] threads = new TimedThread[4];

    IndexWriter writer = new IndexWriter(directory, ANALYZER, true);

    // Establish a base index of 100 docs:
    for (int i = 0; i < 100; i++)
    {
        Document d = new Document();
        d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
        d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
        writer.AddDocument(d);
    }
    writer.Flush();

    IndexerThread indexerThread = new IndexerThread(writer, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    writer.Close();

    Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}