// Runs several indexer threads against a writer that randomly injects exceptions,
// then verifies that no thread recorded an unexpected failure and that the index
// is still consistent.
public virtual void TestRandomExceptionsThreads()
{
    random = new Random((int) (DateTime.Now.Ticks & 0x7fffffff));
    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    ((ConcurrentMergeScheduler) writer.MergeScheduler).SetSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.SetRAMBufferSizeMB(0.2);

    if (DEBUG)
    {
        System.IO.StreamWriter temp_writer;
        temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
        temp_writer.AutoFlush = true;
        writer.SetInfoStream(temp_writer, null);
    }

    int NUM_THREADS = 4;

    IndexerThread[] threads = new IndexerThread[NUM_THREADS];
    for (int i = 0; i < NUM_THREADS; i++)
    {
        threads[i] = new IndexerThread(this, i, writer);
        threads[i].Start();
    }

    for (int i = 0; i < NUM_THREADS; i++)
    {
        threads[i].Join();
    }

    for (int i = 0; i < NUM_THREADS; i++)
    {
        Assert.IsNull(threads[i].failure, "thread " + threads[i].Name + ": hit unexpected failure");
    }

    writer.Commit(null);

    try
    {
        writer.Close();
    }
    catch (System.Exception t)
    {
        System.Console.Out.WriteLine("exception during close:");
        System.Console.Out.WriteLine(t.StackTrace);
        writer.Rollback(null);
    }

    // Confirm that when doc hits exception partway through tokenization, it's deleted:
    IndexReader r2 = IndexReader.Open((Directory) dir, true, null);
    int count = r2.DocFreq(new Term("content4", "aaa"), null);
    int count2 = r2.DocFreq(new Term("content4", "ddd"), null);
    Assert.AreEqual(count, count2);
    r2.Close();

    _TestUtil.CheckIndex(dir);
}
/*
 * Run one indexer and 2 searchers against single index as
 * stress test.
 */
public virtual void RunTest(Directory directory)
{
    TimedThread[] threads = new TimedThread[4];

    IndexWriter writer = new MockIndexWriter(this, directory, true, ANALYZER, true);
    writer.SetMaxBufferedDocs(7);
    writer.SetMergeFactor(3);

    // Establish a base index of 100 docs:
    for (int i = 0; i < 100; i++)
    {
        Document d = new Document();
        d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
        d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
        writer.AddDocument(d);
    }
    writer.Commit();

    IndexReader r = IndexReader.Open(directory);
    Assert.AreEqual(100, r.NumDocs());
    r.Close();

    IndexerThread indexerThread = new IndexerThread(writer, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    writer.Close();

    Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
// Indexes documents from nThreads concurrent IndexingThreads over three writer
// sessions and returns the combined map of documents they added, after checking
// the resulting index for consistency.
public virtual System.Collections.IDictionary IndexRandom(int nThreads, int iterations, int range, Directory dir)
{
    System.Collections.IDictionary docs = new System.Collections.Hashtable();
    for (int iter = 0; iter < 3; iter++)
    {
        IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
        w.SetUseCompoundFile(false);

        // force many merges
        w.SetMergeFactor(mergeFactor);
        w.SetRAMBufferSizeMB(.1);
        w.SetMaxBufferedDocs(maxBufferedDocs);

        threads = new IndexingThread[nThreads];
        for (int i = 0; i < threads.Length; i++)
        {
            IndexingThread th = new IndexingThread();
            th.w = w;
            th.base_Renamed = 1000000 * i;
            th.range = range;
            th.iterations = iterations;
            threads[i] = th;
        }

        for (int i = 0; i < threads.Length; i++)
        {
            threads[i].Start();
        }
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i].Join();
        }

        // w.optimize();
        w.Close();

        for (int i = 0; i < threads.Length; i++)
        {
            IndexingThread th = threads[i];
            lock (th)
            {
                System.Collections.IEnumerator e = th.docs.Keys.GetEnumerator();
                while (e.MoveNext())
                {
                    docs[e.Current] = th.docs[e.Current];
                }
            }
        }
    }

    _TestUtil.CheckIndex(dir);

    return docs;
}
// Single-threaded variant: one IndexerThread is run inline while exceptions are
// randomly injected; any failure it records fails the test, and the index is
// checked for consistency afterwards.
public virtual void TestRandomExceptions()
{
    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    ((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.SetRAMBufferSizeMB(0.1);

    if (DEBUG)
    {
        System.IO.StreamWriter temp_writer;
        temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
        temp_writer.AutoFlush = true;
        writer.SetInfoStream(temp_writer);
    }

    IndexerThread thread = new IndexerThread(this, 0, writer);
    thread.Run();
    if (thread.failure != null)
    {
        System.Console.Out.WriteLine(thread.failure.StackTrace);
        Assert.Fail("thread " + thread.Name + ": hit unexpected failure");
    }

    writer.Commit();

    try
    {
        writer.Close();
    }
    catch (System.Exception t)
    {
        System.Console.Out.WriteLine("exception during close:");
        System.Console.Out.WriteLine(t.StackTrace);
        writer.Rollback();
    }

    // Confirm that when doc hits exception partway through tokenization, it's deleted:
    IndexReader r2 = IndexReader.Open(dir);
    int count = r2.DocFreq(new Term("content4", "aaa"));
    int count2 = r2.DocFreq(new Term("content4", "ddd"));
    Assert.AreEqual(count, count2);
    r2.Close();

    _TestUtil.CheckIndex(dir);
}
/* Run one indexer and 2 searchers against single index as stress test. */
public virtual void RunTest(Directory directory)
{
    TimedThread[] threads = new TimedThread[4];

    IndexWriter writer = new MockIndexWriter(this, directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.SetMaxBufferedDocs(7);
    writer.MergeFactor = 3;

    // Establish a base index of 100 docs:
    for (int i = 0; i < 100; i++)
    {
        Document d = new Document();
        d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
        d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.ANALYZED));
        if ((i - 1) % 7 == 0)
        {
            writer.Commit();
        }
        writer.AddDocument(d);
    }
    writer.Commit();

    IndexReader r = IndexReader.Open(directory, true);
    Assert.AreEqual(100, r.NumDocs());
    r.Close();

    IndexerThread indexerThread = new IndexerThread(writer, threads);
    threads[0] = indexerThread;
    indexerThread.Start();

    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
    threads[1] = indexerThread2;
    indexerThread2.Start();

    SearcherThread searcherThread1 = new SearcherThread(directory, threads);
    threads[2] = searcherThread1;
    searcherThread1.Start();

    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
    threads[3] = searcherThread2;
    searcherThread2.Start();

    indexerThread.Join();
    indexerThread2.Join();
    searcherThread1.Join();
    searcherThread2.Join();

    writer.Close();

    Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
    Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
    Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
    Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");

    //System.out.println("    Writer: " + indexerThread.count + " iterations");
    //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
    //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
}
public virtual System.Collections.IDictionary IndexRandom(int nThreads, int iterations, int range, Directory dir)
{
    System.Collections.IDictionary docs = new System.Collections.Hashtable();
    for (int iter = 0; iter < 3; iter++)
    {
        IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
        w.SetUseCompoundFile(false);

        // force many merges
        w.SetMergeFactor(mergeFactor);
        w.SetRAMBufferSizeMB(.1);
        w.SetMaxBufferedDocs(maxBufferedDocs);

        threads = new IndexingThread[nThreads];
        for (int i = 0; i < threads.Length; i++)
        {
            IndexingThread th = new IndexingThread();
            th.w = w;
            th.base_Renamed = 1000000 * i;
            th.range = range;
            th.iterations = iterations;
            threads[i] = th;
        }

        for (int i = 0; i < threads.Length; i++)
        {
            threads[i].Start();
        }
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i].Join();
        }

        // w.optimize();
        w.Close();

        for (int i = 0; i < threads.Length; i++)
        {
            IndexingThread th = threads[i];
            lock (th)
            {
                System.Collections.IEnumerator e = th.docs.Keys.GetEnumerator();
                while (e.MoveNext())
                {
                    docs[e.Current] = th.docs[e.Current];
                }
            }
        }
    }

    _TestUtil.CheckIndex(dir);

    return docs;
}
public void TestExceptionJustBeforeFlush()
{
    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter w = new MockIndexWriter(dir, false, new WhitespaceAnalyzer(), true);
    w.SetMaxBufferedDocs(2);

    Document doc = new Document();
    doc.Add(new Field("field", "a field", Field.Store.YES, Field.Index.TOKENIZED));
    w.AddDocument(doc);

    Analyzer analyzer = new AnonymousClassAnalyzer3(this);

    Document crashDoc = new Document();
    crashDoc.Add(new Field("crash", "do it on token 4", Field.Store.YES, Field.Index.TOKENIZED));
    try
    {
        w.AddDocument(crashDoc, analyzer);
        Assert.Fail("did not hit expected exception");
    }
    catch (System.IO.IOException)
    {
        // expected
    }

    w.AddDocument(doc);
    w.Close();
    dir.Close();
}
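// --- Illustrative sketch (not part of the test suite above) ---
// TestExceptionJustBeforeFlush relies on an analyzer (AnonymousClassAnalyzer3) whose
// token stream throws partway through tokenization ("do it on token 4"). The filter
// below is a minimal, hypothetical version of that idea, assuming the IncrementToken
// style TokenStream API from Lucene.Net.Analysis; all names are invented for
// illustration and the real test's failure hook may differ.
public class ThrowOnNthTokenFilter : TokenFilter
{
    private readonly int failAfter;
    private int count;

    public ThrowOnNthTokenFilter(TokenStream input, int failAfter) : base(input)
    {
        this.failAfter = failAfter;
    }

    public override bool IncrementToken()
    {
        // Throw when the N-th token is requested, simulating a tokenizer
        // that blows up in the middle of a document.
        if (++count >= failAfter)
        {
            throw new System.IO.IOException("synthetic tokenization failure");
        }
        return input.IncrementToken();
    }
}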
public void TestExceptionDocumentsWriterInit()
{
    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter w = new MockIndexWriter(dir, false, new WhitespaceAnalyzer(), true);

    Document doc = new Document();
    doc.Add(new Field("field", "a field", Field.Store.YES, Field.Index.TOKENIZED));
    w.AddDocument(doc);

    w.doFail = true;
    try
    {
        w.AddDocument(doc);
        Assert.Fail("did not hit exception");
    }
    catch (System.Exception)
    {
        // expected
    }

    w.Close();
    _TestUtil.CheckIndex(dir);
    dir.Close();
}
public virtual void TestRandomExceptionsThreads()
{
    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    ((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.SetRAMBufferSizeMB(0.2);

    if (DEBUG)
    {
        System.IO.StreamWriter temp_writer;
        temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
        temp_writer.AutoFlush = true;
        writer.SetInfoStream(temp_writer);
    }

    int NUM_THREADS = 4;

    IndexerThread[] threads = new IndexerThread[NUM_THREADS];
    for (int i = 0; i < NUM_THREADS; i++)
    {
        threads[i] = new IndexerThread(this, i, writer);
        threads[i].Start();
    }

    for (int i = 0; i < NUM_THREADS; i++)
        threads[i].Join();

    for (int i = 0; i < NUM_THREADS; i++)
        if (threads[i].failure != null)
            Assert.Fail("thread " + threads[i].Name + ": hit unexpected failure");

    writer.Commit();

    try
    {
        writer.Close();
    }
    catch (System.Exception t)
    {
        System.Console.Out.WriteLine("exception during close:");
        System.Console.Out.WriteLine(t.StackTrace);
        writer.Rollback();
    }

    // Confirm that when doc hits exception partway through tokenization, it's deleted:
    IndexReader r2 = IndexReader.Open(dir);
    int count = r2.DocFreq(new Term("content4", "aaa"));
    int count2 = r2.DocFreq(new Term("content4", "ddd"));
    Assert.AreEqual(count, count2);
    r2.Close();

    _TestUtil.CheckIndex(dir);
}
public virtual void TestExceptionDocumentsWriterInit()
{
    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);

    Document doc = new Document();
    doc.Add(new Field("field", "a field", Field.Store.YES, Field.Index.ANALYZED));
    w.AddDocument(doc);

    w.doFail = true;
    try
    {
        w.AddDocument(doc);
        if (SupportClass.BuildType.Debug)
            Assert.Fail("did not hit exception");
        else
            Assert.Ignore("This test is not executed in release mode");
    }
    catch (System.SystemException)
    {
        // expected
    }

    w.Close();
    _TestUtil.CheckIndex(dir);
    dir.Close();
}
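// --- Illustrative sketch (not part of the test suite above) ---
// Both TestExceptionDocumentsWriterInit variants flip w.doFail on a MockIndexWriter
// subclass so that the next AddDocument throws from an internal hook. The stub below
// only illustrates that failure-injection pattern with invented names; it does not
// reproduce the real MockIndexWriter, which plugs into IndexWriter internals.
public class FailureInjectingWriterStub
{
    public bool doFail;

    public virtual void AddDocument(Document doc)
    {
        if (doFail)
        {
            // Simulate the writer failing while initializing per-document state.
            throw new System.SystemException("synthetic failure injected by test");
        }
        // ... normal document indexing would happen here ...
    }
}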