// Stress-tests the supplied LockFactory: seeds a one-document index, then runs
// one writer thread and one searcher thread concurrently against the same
// directory and asserts that neither hit an unexpected exception.
public virtual void _testStressLocks(LockFactory lockFactory, DirectoryInfo indexDir)
{
    Directory directory = NewFSDirectory(indexDir, lockFactory);

    // First create a 1 doc index:
    IndexWriter seedWriter = new IndexWriter(directory, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE));
    AddDoc(seedWriter);
    seedWriter.Dispose();

    WriterThread writerThread = new WriterThread(this, 100, directory);
    SearcherThread searcherThread = new SearcherThread(this, 100, directory);
    writerThread.Start();
    searcherThread.Start();

    // Poll once a second until both worker threads have finished.
    while (writerThread.IsAlive || searcherThread.IsAlive)
    {
        Thread.Sleep(1000);
    }

    Assert.IsTrue(!writerThread.HitException, "IndexWriter hit unexpected exceptions");
    Assert.IsTrue(!searcherThread.HitException, "IndexSearcher hit unexpected exceptions");

    directory.Dispose();

    // Cleanup
    System.IO.Directory.Delete(indexDir.FullName, true);
}
// Stress-tests the supplied LockFactory against an FSDirectory opened on
// indexDir: seeds a one-document index, then runs a writer thread and a
// searcher thread concurrently and asserts neither hit an unexpected exception.
public virtual void _testStressLocks(LockFactory lockFactory, System.IO.FileInfo indexDir)
{
    FSDirectory fsDir = FSDirectory.Open(new System.IO.DirectoryInfo(indexDir.FullName), lockFactory);

    // First create a 1 doc index:
    IndexWriter seedWriter = new IndexWriter(fsDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    AddDoc(seedWriter);
    seedWriter.Close();

    WriterThread writerThread = new WriterThread(this, 100, fsDir);
    SearcherThread searcherThread = new SearcherThread(this, 100, fsDir);
    writerThread.Start();
    searcherThread.Start();

    // Sleep 10,000,000 ticks (1 second) between liveness checks.
    while (writerThread.IsAlive || searcherThread.IsAlive)
    {
        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
    }

    Assert.IsTrue(!writerThread.hitException, "IndexWriter hit unexpected exceptions");
    Assert.IsTrue(!searcherThread.hitException, "IndexSearcher hit unexpected exceptions");

    // Cleanup
    _TestUtil.RmDir(indexDir);
}
// Command-line entry point: unless "-ro" is given, starts an indexer thread
// ("-add" appends to an existing index, otherwise the index is recreated),
// then starts three searcher threads against the "index" directory.
public static void Main(System.String[] args)
{
    bool readOnly = false;
    bool add = false;

    for (int i = 0; i < args.Length; i++)
    {
        if ("-ro".Equals(args[i]))
        {
            readOnly = true;
        }
        if ("-add".Equals(args[i]))
        {
            add = true;
        }
    }

    System.IO.FileInfo indexDir = new System.IO.FileInfo("index");

    // Create the index directory only when nothing (file or directory) exists
    // at that path. Replaces the machine-converted tmpBool dance with a
    // direct, behavior-identical check.
    if (!System.IO.File.Exists(indexDir.FullName) && !System.IO.Directory.Exists(indexDir.FullName))
    {
        System.IO.Directory.CreateDirectory(indexDir.FullName);
    }

    IndexReader.Unlock(FSDirectory.GetDirectory(indexDir, false));

    if (!readOnly)
    {
        IndexWriter writer = new IndexWriter(indexDir, ANALYZER, !add);

        SupportClass.ThreadClass indexerThread = new IndexerThread(writer);
        indexerThread.Start();

        // Give the indexer a head start: 10,000,000 ticks == 1 second.
        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
    }

    SearcherThread searcherThread1 = new SearcherThread(false);
    searcherThread1.Start();

    SEARCHER = new IndexSearcher(indexDir.ToString());

    SearcherThread searcherThread2 = new SearcherThread(true);
    searcherThread2.Start();

    SearcherThread searcherThread3 = new SearcherThread(true);
    searcherThread3.Start();
}
/*
 * Run one indexer and 2 searchers against single index as
 * stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] allThreads = new TimedThread[4];

    IndexWriter writer = new MockIndexWriter(this, directory, true, ANALYZER, true);
    writer.SetMaxBufferedDocs(7);
    writer.SetMergeFactor(3);

    // Establish a base index of 100 docs:
    for (int docId = 0; docId < 100; docId++)
    {
        Document doc = new Document();
        doc.Add(new Field("id", System.Convert.ToString(docId), Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("contents", English.IntToEnglish(docId), Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Commit();

    // Sanity-check the base index before the stress phase begins.
    IndexReader reader = IndexReader.Open(directory);
    Assert.AreEqual(100, reader.NumDocs());
    reader.Close();

    IndexerThread indexer1 = new IndexerThread(writer, allThreads);
    allThreads[0] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(writer, allThreads);
    allThreads[1] = indexer2;
    indexer2.Start();

    SearcherThread search1 = new SearcherThread(directory, allThreads);
    allThreads[2] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, allThreads);
    allThreads[3] = search2;
    search2.Start();

    indexer1.Join();
    indexer2.Join();
    search1.Join();
    search2.Join();

    writer.Close();

    Assert.IsTrue(!indexer1.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexer2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!search1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!search2.failed, "hit unexpected exception in search2");
}
/* Run one indexer and 2 searchers against single index as stress test. */
public virtual void RunTest(Directory directory)
{
    TimedThread[] allThreads = new TimedThread[4];

    IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(7);
    ((TieredMergePolicy)conf.MergePolicy).MaxMergeAtOnce = 3;
    IndexWriter writer = RandomIndexWriter.MockIndexWriter(directory, conf, Random());

    // Establish a base index of 100 docs:
    for (int docId = 0; docId < 100; docId++)
    {
        Documents.Document doc = new Documents.Document();
        doc.Add(NewStringField("id", Convert.ToString(docId), Field.Store.YES));
        doc.Add(NewTextField("contents", English.IntToEnglish(docId), Field.Store.NO));
        // Periodic commits so the stress phase starts from a multi-segment index.
        if ((docId - 1) % 7 == 0)
        {
            writer.Commit();
        }
        writer.AddDocument(doc);
    }
    writer.Commit();

    // Sanity-check the base index before the stress phase begins.
    IndexReader reader = DirectoryReader.Open(directory);
    Assert.AreEqual(100, reader.NumDocs);
    reader.Dispose();

    IndexerThread indexer1 = new IndexerThread(writer, allThreads);
    allThreads[0] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(writer, allThreads);
    allThreads[1] = indexer2;
    indexer2.Start();

    SearcherThread search1 = new SearcherThread(directory, allThreads);
    allThreads[2] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, allThreads);
    allThreads[3] = search2;
    search2.Start();

    indexer1.Join();
    indexer2.Join();
    search1.Join();
    search2.Join();

    writer.Dispose();

    Assert.IsTrue(!indexer1.Failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexer2.Failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!search1.Failed, "hit unexpected exception in search1");
    Assert.IsTrue(!search2.Failed, "hit unexpected exception in search2");
}
// Transaction stress test over two RAM-backed directories with injected random
// failures: one indexer double-writes to both directories while two searchers
// read concurrently; all threads must finish without failures.
public virtual void TestTransactions_Mem(
    [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")] Func<IConcurrentMergeScheduler> newScheduler1,
    [ValueSource(typeof(ConcurrentMergeSchedulerFactories), "Values")] Func<IConcurrentMergeScheduler> newScheduler2)
{
    Console.WriteLine("Start test");

    // we cant use non-ramdir on windows, because this test needs to double-write.
    MockDirectoryWrapper dir1 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    MockDirectoryWrapper dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    dir1.PreventDoubleWrite = false;
    dir2.PreventDoubleWrite = false;
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));
    dir1.FailOnOpenInput = false;
    dir2.FailOnOpenInput = false;

    // We throw exceptions in deleteFile, which creates
    // leftover files:
    dir1.AssertNoUnrefencedFilesOnClose = false;
    dir2.AssertNoUnrefencedFilesOnClose = false;

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] workers = new TimedThread[3];
    int workerCount = 0;

    IndexerThread indexer = new IndexerThread(this, this, dir1, dir2, newScheduler1, newScheduler2, workers);
    workers[workerCount++] = indexer;
    indexer.Start();

    SearcherThread search1 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }
    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!workers[i].Failed);
    }

    dir1.Dispose();
    dir2.Dispose();
    Console.WriteLine("End test");
}
/*
 * Run one indexer and 2 searchers against single index as
 * stress test.
 */
public virtual void RunStressTest(Directory directory, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED, null);
    modifier.SetMaxBufferedDocs(10);

    TimedThread[] workers = new TimedThread[4];
    int workerCount = 0;

    if (mergeScheduler != null)
    {
        modifier.SetMergeScheduler(mergeScheduler, null);
    }

    // One modifier that writes 10 docs then removes 5, over
    // and over:
    IndexerThread indexer1 = new IndexerThread(this, modifier, workers);
    workers[workerCount++] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(this, modifier, workers);
    workers[workerCount++] = indexer2;
    indexer2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread search1 = new SearcherThread(directory, workers);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, workers);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }

    modifier.Close();

    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!((TimedThread)workers[i]).failed);
    }
}
/*
 * Run one indexer and 2 searchers against single index as
 * stress test.
 */
public virtual void RunStressTest(Directory directory, IConcurrentMergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(10).SetMergeScheduler(mergeScheduler));
    modifier.Commit();

    TimedThread[] workers = new TimedThread[4];
    int workerCount = 0;

    // One modifier that writes 10 docs then removes 5, over
    // and over:
    IndexerThread indexer1 = new IndexerThread(modifier, workers, NewStringField, NewTextField);
    workers[workerCount++] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(modifier, workers, NewStringField, NewTextField);
    workers[workerCount++] = indexer2;
    indexer2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread search1 = new SearcherThread(directory, workers, this);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, workers, this);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }

    modifier.Dispose();

    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!workers[i].failed);
    }
}
/* Run one indexer and 2 searchers against single index as stress test. */
public virtual void RunStressTest(Directory directory, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.CREATE).SetMaxBufferedDocs(10).SetMergeScheduler(mergeScheduler));
    modifier.Commit();

    TimedThread[] workers = new TimedThread[4];
    int workerCount = 0;

    // One modifier that writes 10 docs then removes 5, over
    // and over:
    IndexerThread indexer1 = new IndexerThread(modifier, workers);
    workers[workerCount++] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(modifier, workers);
    workers[workerCount++] = indexer2;
    indexer2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread search1 = new SearcherThread(directory, workers);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, workers);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }

    modifier.Dispose();

    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!workers[i].Failed);
    }
}
// Transaction stress test over two MockRAMDirectorys with injected random
// failures: one indexer and two searchers run concurrently, then every thread
// is checked for unexpected failures.
public virtual void TestTransactions_Rename()
{
    RANDOM = NewRandom();

    MockRAMDirectory dir1 = new MockRAMDirectory();
    MockRAMDirectory dir2 = new MockRAMDirectory();
    dir1.SetPreventDoubleWrite(false);
    dir2.SetPreventDoubleWrite(false);
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] workers = new TimedThread[3];
    int workerCount = 0;

    IndexerThread indexer = new IndexerThread(this, this, dir1, dir2, workers);
    workers[workerCount++] = indexer;
    indexer.Start();

    SearcherThread search1 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }
    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!((TimedThread)workers[i]).failed);
    }
}
/* Run one indexer and 2 searchers against single index as stress test. */
public virtual void RunTest(Directory directory)
{
    TimedThread[] allThreads = new TimedThread[4];

    IndexWriter writer = new MockIndexWriter(this, directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.SetMaxBufferedDocs(7);
    writer.MergeFactor = 3;

    // Establish a base index of 100 docs:
    for (int docId = 0; docId < 100; docId++)
    {
        Document doc = new Document();
        doc.Add(new Field("id", System.Convert.ToString(docId), Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("contents", English.IntToEnglish(docId), Field.Store.NO, Field.Index.ANALYZED));
        // Periodic commits so the stress phase starts from a multi-segment index.
        if ((docId - 1) % 7 == 0)
        {
            writer.Commit();
        }
        writer.AddDocument(doc);
    }
    writer.Commit();

    // Sanity-check the base index before the stress phase begins.
    IndexReader reader = IndexReader.Open(directory, true);
    Assert.AreEqual(100, reader.NumDocs());
    reader.Close();

    IndexerThread indexer1 = new IndexerThread(writer, allThreads);
    allThreads[0] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(writer, allThreads);
    allThreads[1] = indexer2;
    indexer2.Start();

    SearcherThread search1 = new SearcherThread(directory, allThreads);
    allThreads[2] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, allThreads);
    allThreads[3] = search2;
    search2.Start();

    indexer1.Join();
    indexer2.Join();
    search1.Join();
    search2.Join();

    writer.Close();

    Assert.IsTrue(!indexer1.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexer2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!search1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!search2.failed, "hit unexpected exception in search2");
}
/* Run one indexer and 2 searchers against single index as stress test. */
public virtual void RunStressTest(Directory directory, bool autoCommit, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
    modifier.SetMaxBufferedDocs(10);

    TimedThread[] workers = new TimedThread[4];
    int workerCount = 0;

    if (mergeScheduler != null)
    {
        modifier.SetMergeScheduler(mergeScheduler);
    }

    // One modifier that writes 10 docs then removes 5, over
    // and over:
    IndexerThread indexer1 = new IndexerThread(this, modifier, workers);
    workers[workerCount++] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(this, modifier, workers);
    workers[workerCount++] = indexer2;
    indexer2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread search1 = new SearcherThread(directory, workers);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, workers);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }

    modifier.Close();

    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!((TimedThread) workers[i]).failed);
    }
}
// Transaction stress test over two RAM-backed directories with injected random
// failures: one indexer double-writes to both directories while two searchers
// read concurrently; all threads must finish without failures.
public virtual void TestTransactions_Mem()
{
    Console.WriteLine("Start test");

    // we cant use non-ramdir on windows, because this test needs to double-write.
    MockDirectoryWrapper dir1 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    MockDirectoryWrapper dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory());
    dir1.PreventDoubleWrite = false;
    dir2.PreventDoubleWrite = false;
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));
    dir1.FailOnOpenInput = false;
    dir2.FailOnOpenInput = false;

    // We throw exceptions in deleteFile, which creates
    // leftover files:
    dir1.AssertNoUnrefencedFilesOnClose = false;
    dir2.AssertNoUnrefencedFilesOnClose = false;

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] workers = new TimedThread[3];
    int workerCount = 0;

    IndexerThread indexer = new IndexerThread(this, this, dir1, dir2, workers);
    workers[workerCount++] = indexer;
    indexer.Start();

    SearcherThread search1 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }
    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!workers[i].Failed);
    }

    dir1.Dispose();
    dir2.Dispose();
    Console.WriteLine("End test");
}
// Command-line entry point: unless "-ro" is given, starts an indexer thread
// ("-add" appends to an existing index, otherwise the index is recreated),
// then starts three searcher threads against the "index" directory.
public static void Main(System.String[] args)
{
    bool readOnly = false;
    bool add = false;

    for (int i = 0; i < args.Length; i++)
    {
        if ("-ro".Equals(args[i]))
        {
            readOnly = true;
        }
        if ("-add".Equals(args[i]))
        {
            add = true;
        }
    }

    System.IO.FileInfo indexDir = new System.IO.FileInfo("index");

    // Create the index directory only when nothing (file or directory) exists
    // at that path. Replaces the machine-converted tmpBool dance with a
    // direct, behavior-identical check.
    if (!System.IO.File.Exists(indexDir.FullName) && !System.IO.Directory.Exists(indexDir.FullName))
    {
        System.IO.Directory.CreateDirectory(indexDir.FullName);
    }

    IndexReader.Unlock(FSDirectory.GetDirectory(indexDir, false));

    if (!readOnly)
    {
        IndexWriter writer = new IndexWriter(indexDir, ANALYZER, !add);

        SupportClass.ThreadClass indexerThread = new IndexerThread(writer);
        indexerThread.Start();

        // Give the indexer a head start: 10,000,000 ticks == 1 second.
        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
    }

    SearcherThread searcherThread1 = new SearcherThread(false);
    searcherThread1.Start();

    SEARCHER = new IndexSearcher(indexDir.ToString());

    SearcherThread searcherThread2 = new SearcherThread(true);
    searcherThread2.Start();

    SearcherThread searcherThread3 = new SearcherThread(true);
    searcherThread3.Start();
}
/* Run one indexer and 2 searchers against single index as stress test. */
public virtual void RunTest(Directory directory)
{
    TimedThread[] allThreads = new TimedThread[4];

    IndexWriter writer = new IndexWriter(directory, ANALYZER, true);

    // Establish a base index of 100 docs:
    for (int docId = 0; docId < 100; docId++)
    {
        Document doc = new Document();
        doc.Add(new Field("id", System.Convert.ToString(docId), Field.Store.YES, Field.Index.UN_TOKENIZED));
        doc.Add(new Field("contents", English.IntToEnglish(docId), Field.Store.NO, Field.Index.TOKENIZED));
        writer.AddDocument(doc);
    }
    writer.Flush();

    IndexerThread indexer1 = new IndexerThread(writer, allThreads);
    allThreads[0] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(writer, allThreads);
    allThreads[1] = indexer2;
    indexer2.Start();

    SearcherThread search1 = new SearcherThread(directory, allThreads);
    allThreads[2] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, allThreads);
    allThreads[3] = search2;
    search2.Start();

    indexer1.Join();
    indexer2.Join();
    search1.Join();
    search2.Join();

    writer.Close();

    Assert.IsTrue(!indexer1.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexer2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!search1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!search2.failed, "hit unexpected exception in search2");
}
// Transaction stress test over two MockRAMDirectorys with injected random
// failures: one indexer and two searchers run concurrently, then every thread
// is checked for unexpected failures.
public virtual void TestTransactions_Rename()
{
    RANDOM = NewRandom();

    MockRAMDirectory dir1 = new MockRAMDirectory();
    MockRAMDirectory dir2 = new MockRAMDirectory();
    dir1.SetPreventDoubleWrite(false);
    dir2.SetPreventDoubleWrite(false);
    dir1.FailOn(new RandomFailure(this));
    dir2.FailOn(new RandomFailure(this));

    InitIndex(dir1);
    InitIndex(dir2);

    TimedThread[] workers = new TimedThread[3];
    int workerCount = 0;

    IndexerThread indexer = new IndexerThread(this, this, dir1, dir2, workers);
    workers[workerCount++] = indexer;
    indexer.Start();

    SearcherThread search1 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(this, dir1, dir2, workers);
    workers[workerCount++] = search2;
    search2.Start();

    for (int i = 0; i < workerCount; i++)
    {
        workers[i].Join();
    }
    for (int i = 0; i < workerCount; i++)
    {
        Assert.IsTrue(!((TimedThread) workers[i]).failed);
    }
}
/* Run one indexer and 2 searchers against single index as stress test. */
public virtual void RunStressTest(Directory directory, bool autoCommit, MergeScheduler mergeScheduler)
{
    IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
    modifier.SetMaxBufferedDocs(10);

    TimedThread[] threads = new TimedThread[4];

    if (mergeScheduler != null)
        modifier.SetMergeScheduler(mergeScheduler);

    // One modifier that writes 10 docs then removes 5, over
    // and over:
    IndexerThread indexerThread = new IndexerThread(modifier, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(modifier, threads);
    // BUGFIX: was threads[2]; combined with the double assignment to
    // threads[3] below, slot 1 stayed null and searcherThread1's entry was
    // overwritten in the shared array handed to every thread.
    threads[1] = indexerThread2;
    indexerThread2.Start();

    // Two searchers that constantly just re-instantiate the
    // searcher:
    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    // BUGFIX: was threads[3], which searcherThread2 then overwrote.
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    modifier.Close();

    Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");
}
/*
 * Run one indexer and 2 searchers against single index as
 * stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] allThreads = new TimedThread[4];

    IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMaxBufferedDocs(7);
    ((TieredMergePolicy)conf.MergePolicy).MaxMergeAtOnce = 3;
    IndexWriter writer = RandomIndexWriter.MockIndexWriter(directory, conf, Random());

    // Establish a base index of 100 docs:
    for (int docId = 0; docId < 100; docId++)
    {
        Documents.Document doc = new Documents.Document();
        doc.Add(NewStringField("id", Convert.ToString(docId), Field.Store.YES));
        doc.Add(NewTextField("contents", English.IntToEnglish(docId), Field.Store.NO));
        // Periodic commits so the stress phase starts from a multi-segment index.
        if ((docId - 1) % 7 == 0)
        {
            writer.Commit();
        }
        writer.AddDocument(doc);
    }
    writer.Commit();

    // Sanity-check the base index before the stress phase begins.
    IndexReader reader = DirectoryReader.Open(directory);
    Assert.AreEqual(100, reader.NumDocs);
    reader.Dispose();

    IndexerThread indexer1 = new IndexerThread(writer, allThreads);
    allThreads[0] = indexer1;
    indexer1.Start();

    IndexerThread indexer2 = new IndexerThread(writer, allThreads);
    allThreads[1] = indexer2;
    indexer2.Start();

    SearcherThread search1 = new SearcherThread(directory, allThreads);
    allThreads[2] = search1;
    search1.Start();

    SearcherThread search2 = new SearcherThread(directory, allThreads);
    allThreads[3] = search2;
    search2.Start();

    indexer1.Join();
    indexer2.Join();
    search1.Join();
    search2.Join();

    writer.Dispose();

    Assert.IsTrue(!indexer1.Failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexer2.Failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!search1.Failed, "hit unexpected exception in search1");
    Assert.IsTrue(!search2.Failed, "hit unexpected exception in search2");
}
// Stress-tests the supplied LockFactory against an FSDirectory resolved from a
// path name: seeds a one-document index, then runs a writer thread and a
// searcher thread concurrently and asserts neither hit an unexpected exception.
public virtual void _TestStressLocks(LockFactory lockFactory, System.String indexDirName)
{
    FSDirectory fsDir = FSDirectory.GetDirectory(indexDirName, lockFactory);

    // First create a 1 doc index:
    IndexWriter seedWriter = new IndexWriter(fsDir, new WhitespaceAnalyzer(), true);
    AddDoc(seedWriter);
    seedWriter.Close();

    WriterThread writerThread = new WriterThread(this, 100, fsDir);
    SearcherThread searcherThread = new SearcherThread(this, 100, fsDir);
    writerThread.Start();
    searcherThread.Start();

    while (writerThread.IsAlive || searcherThread.IsAlive)
    {
        try
        {
            // 10,000,000 ticks == 1 second between liveness checks.
            System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
        }
        catch (System.Threading.ThreadInterruptedException)
        {
            // Deliberately ignored: an interrupt simply re-checks the loop condition.
        }
    }

    Assert.IsTrue(!writerThread.hitException, "IndexWriter hit unexpected exceptions");
    Assert.IsTrue(!searcherThread.hitException, "IndexSearcher hit unexpected exceptions");

    // Cleanup
    RmDir(indexDirName);
}