/// <summary>
/// Stress test: several threads race to update the same document id across three
/// full writer sessions; afterwards the index must contain exactly one live doc.
/// FIX: the original leaked the writer, reader, LineFileDocs and Directory whenever
/// the assertion or a thread failure threw — resources are now disposed via
/// using/finally (success-path disposal order is unchanged).
/// </summary>
public virtual void TestUpdateSameDoc()
{
    Directory dir = NewDirectory();
    try
    {
        LineFileDocs docs = new LineFileDocs(Random());
        try
        {
            for (int r = 0; r < 3; r++)
            {
                // MaxBufferedDocs=2 forces frequent flushes while the updates race.
                using (IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2)))
                {
                    int numUpdates = AtLeast(20);
                    int numThreads = TestUtil.NextInt(Random(), 2, 6);
                    IndexingThread[] threads = new IndexingThread[numThreads];
                    for (int i = 0; i < numThreads; i++)
                    {
                        threads[i] = new IndexingThread(docs, w, numUpdates, NewStringField);
                        threads[i].Start();
                    }
                    for (int i = 0; i < numThreads; i++)
                    {
                        threads[i].Join();
                    }
                }
            }

            using (IndexReader open = DirectoryReader.Open(dir))
            {
                // Every update targeted the same doc id, so one live doc remains.
                Assert.AreEqual(1, open.NumDocs);
            }
        }
        finally
        {
            docs.Dispose();
        }
    }
    finally
    {
        dir.Dispose();
    }
}
// Builds an index with nThreads concurrent IndexingThreads and returns both the
// merged per-thread document map and the still-open writer, so the caller can open
// a reader against the live writer. The caller owns the writer's lifetime.
public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
{
    System.Collections.Hashtable collected = new System.Collections.Hashtable();

    IndexWriter writer = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
    writer.SetUseCompoundFile(false);
    // Small RAM buffer and doc limits force many merges during the run.
    writer.SetMergeFactor(mergeFactor);
    writer.SetRAMBufferSizeMB(.1);
    writer.SetMaxBufferedDocs(maxBufferedDocs);

    threads = new IndexingThread[nThreads];
    for (int t = 0; t < threads.Length; t++)
    {
        IndexingThread worker = new IndexingThread();
        worker.w = writer;
        worker.base_Renamed = 1000000 * t; // disjoint id ranges per thread
        worker.range = range;
        worker.iterations = iterations;
        threads[t] = worker;
    }

    foreach (IndexingThread worker in threads)
    {
        worker.Start();
    }
    foreach (IndexingThread worker in threads)
    {
        worker.Join();
    }

    // Writer is intentionally NOT closed here.
    foreach (IndexingThread worker in threads)
    {
        lock (worker)
        {
            SupportClass.CollectionsHelper.AddAllIfNotContains(collected, worker.docs);
        }
    }

    _TestUtil.CheckIndex(dir);

    DocsAndWriter result = new DocsAndWriter();
    result.docs = collected;
    result.writer = writer;
    return result;
}
// Concurrently indexes random documents with several IndexingThreads and returns
// the merged doc map together with the still-open writer; the caller is expected
// to dispose the writer (typically after opening an NRT reader against it).
public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
{
    IDictionary<string, Document> collected = new Dictionary<string, Document>();

    IndexWriter writer = RandomIndexWriter.MockIndexWriter(
        dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
            .SetOpenMode(OpenMode.CREATE)
            .SetRAMBufferSizeMB(0.1)
            .SetMaxBufferedDocs(maxBufferedDocs)
            .SetMergePolicy(NewLogMergePolicy()),
        new YieldTestPoint(this));
    writer.Commit();

    LogMergePolicy mergePolicy = (LogMergePolicy)writer.Config.MergePolicy;
    mergePolicy.NoCFSRatio = 0.0;
    mergePolicy.MergeFactor = mergeFactor; // force many merges

    threads = new IndexingThread[nThreads];
    for (int t = 0; t < threads.Length; t++)
    {
        IndexingThread worker = new IndexingThread(this);
        worker.w = writer;
        worker.@base = 1000000 * t; // disjoint id ranges per thread
        worker.range = range;
        worker.iterations = iterations;
        threads[t] = worker;
    }

    foreach (IndexingThread worker in threads)
    {
        worker.Start();
    }
    foreach (IndexingThread worker in threads)
    {
        worker.Join();
    }

    // Writer is intentionally NOT disposed here.
    foreach (IndexingThread worker in threads)
    {
        lock (worker)
        {
            collected.PutAll(worker.docs);
        }
    }

    TestUtil.CheckIndex(dir);

    DocsAndWriter result = new DocsAndWriter();
    result.docs = collected;
    result.writer = writer;
    return result;
}
/// <summary>
/// Runs three sessions of concurrent random indexing into <paramref name="dir"/>,
/// each with <paramref name="nThreads"/> threads doing <paramref name="iterations"/>
/// operations over <paramref name="range"/> ids, checks the index, and returns the
/// union of every document the worker threads recorded, keyed by id.
/// FIX: merging each thread's map used Keys enumeration plus the indexer, doing a
/// second hash lookup for every key; entries are now enumerated once.
/// </summary>
public virtual System.Collections.IDictionary IndexRandom(int nThreads, int iterations, int range, Directory dir)
{
    System.Collections.IDictionary docs = new System.Collections.Hashtable();
    for (int iter = 0; iter < 3; iter++)
    {
        IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
        w.SetUseCompoundFile(false);
        // force many merges
        w.SetMergeFactor(mergeFactor);
        w.SetRAMBufferSizeMB(.1);
        w.SetMaxBufferedDocs(maxBufferedDocs);

        threads = new IndexingThread[nThreads];
        for (int i = 0; i < threads.Length; i++)
        {
            IndexingThread th = new IndexingThread();
            th.w = w;
            th.base_Renamed = 1000000 * i; // disjoint id ranges per thread
            th.range = range;
            th.iterations = iterations;
            threads[i] = th;
        }
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i].Start();
        }
        for (int i = 0; i < threads.Length; i++)
        {
            threads[i].Join();
        }

        w.Close();

        for (int i = 0; i < threads.Length; i++)
        {
            IndexingThread th = threads[i];
            lock (th)
            {
                // Single pass over the entries; later sessions overwrite earlier
                // values for the same key, matching the original semantics.
                foreach (System.Collections.DictionaryEntry entry in th.docs)
                {
                    docs[entry.Key] = entry.Value;
                }
            }
        }
    }
    _TestUtil.CheckIndex(dir);
    return docs;
}
// Indexes random documents from several threads using a configurable indexer
// thread-pool size and optional reader pooling, disposes the writer, verifies the
// index on disk, and returns the union of all documents each thread recorded.
public virtual IDictionary<string, Document> IndexRandom(int nThreads, int iterations, int range, Directory dir, int maxThreadStates, bool doReaderPooling)
{
    IDictionary<string, Document> merged = new Dictionary<string, Document>();

    IndexWriter writer = RandomIndexWriter.MockIndexWriter(
        dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
            .SetOpenMode(OpenMode.CREATE)
            .SetRAMBufferSizeMB(0.1)
            .SetMaxBufferedDocs(maxBufferedDocs)
            .SetIndexerThreadPool(new DocumentsWriterPerThreadPool(maxThreadStates))
            .SetReaderPooling(doReaderPooling)
            .SetMergePolicy(NewLogMergePolicy()),
        new YieldTestPoint(this));

    LogMergePolicy mergePolicy = (LogMergePolicy)writer.Config.MergePolicy;
    mergePolicy.NoCFSRatio = 0.0;
    mergePolicy.MergeFactor = mergeFactor; // force many merges

    threads = new IndexingThread[nThreads];
    for (int t = 0; t < threads.Length; t++)
    {
        IndexingThread worker = new IndexingThread(this);
        worker.w = writer;
        worker.@base = 1000000 * t; // disjoint id ranges per thread
        worker.range = range;
        worker.iterations = iterations;
        threads[t] = worker;
    }

    foreach (IndexingThread worker in threads)
    {
        worker.Start();
    }
    foreach (IndexingThread worker in threads)
    {
        worker.Join();
    }

    writer.Dispose();

    foreach (IndexingThread worker in threads)
    {
        // Interruption-safe lock around each worker's private doc map.
        UninterruptableMonitor.Enter(worker);
        try
        {
            merged.PutAll(worker.docs);
        }
        finally
        {
            UninterruptableMonitor.Exit(worker);
        }
    }

    TestUtil.CheckIndex(dir);
    return merged;
}
// Performs three rounds of multi-threaded random indexing against the given
// directory, closes the writer after each round, checks the resulting index, and
// returns every document the worker threads recorded, keyed by id.
public virtual System.Collections.IDictionary IndexRandom(int nThreads, int iterations, int range, Directory dir)
{
    System.Collections.IDictionary merged = new System.Collections.Hashtable();

    for (int round = 0; round < 3; round++)
    {
        IndexWriter writer = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
        writer.SetUseCompoundFile(false);
        // Tiny buffers force many merges during the run.
        writer.SetMergeFactor(mergeFactor);
        writer.SetRAMBufferSizeMB(.1);
        writer.SetMaxBufferedDocs(maxBufferedDocs);

        threads = new IndexingThread[nThreads];
        for (int t = 0; t < threads.Length; t++)
        {
            IndexingThread worker = new IndexingThread();
            worker.w = writer;
            worker.base_Renamed = 1000000 * t; // disjoint id ranges per thread
            worker.range = range;
            worker.iterations = iterations;
            threads[t] = worker;
        }

        foreach (IndexingThread worker in threads)
        {
            worker.Start();
        }
        foreach (IndexingThread worker in threads)
        {
            worker.Join();
        }

        writer.Close();

        foreach (IndexingThread worker in threads)
        {
            lock (worker)
            {
                foreach (object key in worker.docs.Keys)
                {
                    merged[key] = worker.docs[key];
                }
            }
        }
    }

    _TestUtil.CheckIndex(dir);
    return merged;
}
// Spawns nThreads IndexingThreads against one writer, waits for them, merges each
// thread's doc map into a single table, verifies the index, and returns the docs
// plus the still-open writer (caller is responsible for closing it).
public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
{
    System.Collections.Hashtable merged = new System.Collections.Hashtable();

    IndexWriter writer = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
    writer.SetUseCompoundFile(false);
    // Aggressive merge settings: tiny RAM buffer and low doc limit.
    writer.SetMergeFactor(mergeFactor);
    writer.SetRAMBufferSizeMB(.1);
    writer.SetMaxBufferedDocs(maxBufferedDocs);

    threads = new IndexingThread[nThreads];
    for (int idx = 0; idx < threads.Length; idx++)
    {
        IndexingThread th = new IndexingThread();
        th.w = writer;
        th.base_Renamed = 1000000 * idx; // disjoint id ranges per thread
        th.range = range;
        th.iterations = iterations;
        threads[idx] = th;
    }

    for (int idx = 0; idx < threads.Length; idx++)
    {
        threads[idx].Start();
    }
    for (int idx = 0; idx < threads.Length; idx++)
    {
        threads[idx].Join();
    }

    // The writer deliberately stays open for the caller.
    for (int idx = 0; idx < threads.Length; idx++)
    {
        IndexingThread th = threads[idx];
        lock (th)
        {
            SupportClass.CollectionsHelper.AddAllIfNotContains(merged, th.docs);
        }
    }

    _TestUtil.CheckIndex(dir);

    DocsAndWriter outcome = new DocsAndWriter();
    outcome.docs = merged;
    outcome.writer = writer;
    return outcome;
}