/// <summary>
/// Sets up the FilterManager singleton: creates the filter cache, applies the
/// default cleaning thresholds, and launches the background cache-cleaner thread.
/// </summary>
protected internal FilterManager()
{
    cache = new Support.Dictionary<int, FilterItem>();
    cacheCleanSize = DEFAULT_CACHE_CLEAN_SIZE; // Let the cache get to 100 items
    cleanSleepTime = DEFAULT_CACHE_SLEEP_TIME; // 10 minutes between cleanings

    filterCleaner = new FilterCleaner(this);
    Support.ThreadClass fcThread = new Support.ThreadClass(new System.Threading.ThreadStart(filterCleaner.Run));
    // Set to be a daemon (background) thread so it doesn't keep the process
    // alive and doesn't have to be stopped explicitly.
    fcThread.IsBackground = true;
    fcThread.Start();
}
/// <summary>
/// Exercises the Support.ThreadClass wrapper: bare construction, current-thread
/// identity against the CLR, result propagation from a started thread, and the
/// overloaded null-comparison operators.
/// </summary>
public void Test()
{
    // Constructing a bare wrapper must not throw.
    Support.ThreadClass plainWrapper = new Support.ThreadClass();

    // The wrapper's notion of "current thread" must match the CLR's.
    Assert.IsTrue(Support.ThreadClass.Current().Instance.ManagedThreadId == System.Threading.Thread.CurrentThread.ManagedThreadId);

    // A started MyThread eventually publishes a boolean Result; poll until it does.
    MyThread worker = new MyThread();
    worker.Start();
    while (worker.Result == null)
    {
        System.Threading.Thread.Sleep(1);
    }
    Assert.IsTrue((bool)worker.Result);

    // The overloaded == and != operators must handle null references.
    Support.ThreadClass nullThread = null;
    Assert.IsTrue(nullThread == null);
    Assert.IsFalse(nullThread != null);
}
/// <summary>
/// Verifies that MockRAMDirectory's tracked size stays consistent with a full
/// recomputation, both before and after concurrent writer threads have run.
/// </summary>
public virtual void TestRAMDirectorySize()
{
    MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
    IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
    writer.Optimize();

    // Tracked size must agree with a from-scratch recomputation.
    Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());

    Support.ThreadClass[] threads = new Support.ThreadClass[numThreads];
    for (int t = 0; t < numThreads; t++)
    {
        int num = t;
        threads[t] = new AnonymousClassThread(num, writer, ramDir, this);
    }
    foreach (Support.ThreadClass worker in threads)
    {
        worker.Start();
    }
    foreach (Support.ThreadClass worker in threads)
    {
        worker.Join();
    }

    // After all concurrent work, the invariant must still hold.
    writer.Optimize();
    Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
    writer.Close();
}
// Shared constructor initialization: remembers the owning test instance and
// allocates the worker-thread array (one slot per NUM_THREADS).
private void InitBlock(TestIndexWriterReader enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
    threads = new Support.ThreadClass[NUM_THREADS];
}
/// <summary>
/// Stress-tests payload writing under concurrency: several ingester threads
/// index documents whose term payloads encode the term text, then a
/// single-threaded pass verifies every stored payload round-trips correctly.
/// </summary>
public virtual void TestThreadSafety()
{
    rnd = NewRandom();
    int numThreads = 5;
    int numDocs = 50; // documents indexed per ingester thread
    ByteArrayPool pool = new ByteArrayPool(numThreads, 5);

    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    System.String field = "test";

    // Launch one ingester per thread against the shared writer, then wait.
    Support.ThreadClass[] ingesters = new Support.ThreadClass[numThreads];
    for (int i = 0; i < numThreads; i++)
    {
        ingesters[i] = new AnonymousClassThread(numDocs, field, pool, writer, this);
        ingesters[i].Start();
    }
    for (int i = 0; i < numThreads; i++)
    {
        ingesters[i].Join();
    }
    writer.Close();

    // Verification pass: every payload at every position of every term must
    // decode back to that term's text.
    IndexReader reader = IndexReader.Open(dir);
    TermEnum terms = reader.Terms();
    while (terms.Next())
    {
        TermPositions tp = reader.TermPositions(terms.Term());
        while (tp.Next())
        {
            int freq = tp.Freq();
            for (int i = 0; i < freq; i++)
            {
                tp.NextPosition();
                // 5-byte payload buffer matches the pool's array size above.
                Assert.AreEqual(pool.BytesToString(tp.GetPayload(new byte[5], 0)), terms.Term().text_ForNUnit);
            }
        }
        tp.Close();
    }
    terms.Close();
    reader.Close();

    // All pooled byte arrays must have been returned by the ingesters.
    Assert.AreEqual(pool.Size(), numThreads);
}
/// <summary>
/// Runs N_THREADS concurrent workers, each recording success in a shared bit
/// array, and asserts that every one of them succeeded.
/// </summary>
/// <param name="withTimeout">Passed through to each worker thread.</param>
private void DoTestMultiThreads(bool withTimeout)
{
    Support.ThreadClass[] threadArray = new Support.ThreadClass[N_THREADS];

    // Round the bit-array size up to the next multiple of 64, mirroring Java
    // BitSet word granularity so Cardinality sees the expected backing size.
    int paddedSize = ((N_THREADS + 63) / 64) * 64;
    System.Collections.BitArray success = new System.Collections.BitArray(paddedSize);

    for (int idx = 0; idx < threadArray.Length; ++idx)
    {
        threadArray[idx] = new AnonymousClassThread(withTimeout, success, idx, this);
    }
    foreach (Support.ThreadClass worker in threadArray)
    {
        worker.Start();
    }
    foreach (Support.ThreadClass worker in threadArray)
    {
        worker.Join();
    }

    Assert.AreEqual(N_THREADS, Support.BitSetSupport.Cardinality(success), "some threads failed!");
}
// Starts this worker: records the reader to operate on, resets the elapsed-time
// counter, and launches Run on a new background search thread.
public virtual void Init(IndexReader reader)
{
    this.reader = reader;
    timeElapsed = 0;
    // NOTE(review): t has no local declaration here, so it is presumably a
    // field — the thread handle must outlive this call; confirm the owner
    // Joins/stops it elsewhere.
    t = new Support.ThreadClass(new System.Threading.ThreadStart(this.Run));
    t.Start();
}
/// <summary>
/// Repeatedly indexes 200 base documents per iteration, then runs NUM_THREADS
/// concurrent workers against the same writer and checks the resulting document
/// count and that the index ends up optimized.
/// </summary>
/// <param name="directory">Target directory for the index.</param>
/// <param name="autoCommit">When false, the writer is closed and reopened each
/// iteration so changes are committed explicitly.</param>
/// <param name="merger">Optional merge scheduler; null keeps the writer's default.</param>
public virtual void runTest(Directory directory, bool autoCommit, MergeScheduler merger)
{
    IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
    writer.SetMaxBufferedDocs(2);
    if (merger != null)
        writer.SetMergeScheduler(merger);

    for (int iter = 0; iter < NUM_ITER; iter++)
    {
        int iterFinal = iter;

        // High merge factor while bulk-adding the base docs: avoids merges
        // during this sequential phase.
        writer.SetMergeFactor(1000);
        for (int i = 0; i < 200; i++)
        {
            Document d = new Document();
            d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
            d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
            writer.AddDocument(d);
        }
        // Low merge factor so the concurrent workers below trigger merges.
        writer.SetMergeFactor(4);
        //writer.setInfoStream(System.out);
        int docCount = writer.DocCount();

        // Spin up the concurrent indexing workers and wait for all of them.
        Support.ThreadClass[] threads = new Support.ThreadClass[NUM_THREADS];
        for (int i = 0; i < NUM_THREADS; i++)
        {
            int iFinal = i;
            IndexWriter writerFinal = writer;
            threads[i] = new AnonymousClassThread(writerFinal, iFinal, iterFinal, this);
        }
        for (int i = 0; i < NUM_THREADS; i++)
            threads[i].Start();
        for (int i = 0; i < NUM_THREADS; i++)
            threads[i].Join();

        Assert.IsTrue(!failed);

        // Expected total: 200 base docs per completed iteration plus the
        // workers' contribution. NOTE(review): the worker-contribution term
        // presumably mirrors AnonymousClassThread's per-thread doc count —
        // confirm against that class.
        int expectedDocCount = (int) ((1 + iter) * (200 + 8 * NUM_ITER2 * (NUM_THREADS / 2.0) * (1 + NUM_THREADS)));
        // System.out.println("TEST: now index=" + writer.segString());
        Assert.AreEqual(expectedDocCount, writer.DocCount());

        if (!autoCommit)
        {
            // Commit by closing, then reopen against the existing index.
            writer.Close();
            writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
            writer.SetMaxBufferedDocs(2);
        }

        // Re-check through a fresh reader each iteration.
        IndexReader reader = IndexReader.Open(directory);
        Assert.IsTrue(reader.IsOptimized());
        Assert.AreEqual(expectedDocCount, reader.NumDocs());
        reader.Close();
    }
    writer.Close();
}