/// <summary>
/// Verifies that ForceMerge only hands work to the merge scheduler when the
/// current segment count actually exceeds the requested maximum.
/// </summary>
public virtual void TestForceMergeNotNeeded()
{
    Directory dir = NewDirectory();
    AtomicBoolean mayMerge = new AtomicBoolean(true);
    MergeScheduler mergeScheduler = new SerialMergeSchedulerAnonymousInnerClassHelper(this, mayMerge);
    IndexWriter writer = new IndexWriter(
        dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
            .SetMergeScheduler(mergeScheduler)
            .SetMergePolicy(MergePolicy()));
    writer.Config.MergePolicy.NoCFSRatio = Random().NextBoolean() ? 0 : 1;

    // Build a random number of small segments; disposing an NRT reader after
    // each batch forces a flush so each batch becomes its own segment.
    int numSegments = TestUtil.NextInt(Random(), 2, 20);
    for (int seg = 0; seg < numSegments; ++seg)
    {
        int docsInSegment = TestUtil.NextInt(Random(), 1, 5);
        for (int d = 0; d < docsInSegment; ++d)
        {
            writer.AddDocument(new Document());
        }
        writer.Reader.Dispose();
    }

    // Repeatedly force-merge; the scheduler asserts (via mayMerge) that it is
    // only invoked when merging would actually reduce the segment count.
    for (int round = 5; round >= 0; --round)
    {
        int segmentCount = writer.SegmentCount;
        int maxNumSegments = round == 0 ? 1 : TestUtil.NextInt(Random(), 1, 10);
        mayMerge.Set(segmentCount > maxNumSegments);
        writer.ForceMerge(maxNumSegments);
    }

    writer.Dispose();
    dir.Dispose();
}
/// <summary>
/// Checks that a concurrent ForceMerge(1) does not loop forever while another
/// thread keeps adding documents: at most one merge may be observed.
/// </summary>
public virtual void Test()
{
    Directory d = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);

    MyIndexWriter w = new MyIndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

    // Try to make an index that requires merging:
    w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 2, 11));
    int numStartDocs = AtLeast(20);
    LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
    for (int docIDX = 0; docIDX < numStartDocs; docIDX++)
    {
        w.AddDocument(docs.NextDoc());
    }

    // Tune the merge policy so that a merge is guaranteed to be pending.
    MergePolicy mp = w.Config.MergePolicy;
    int mergeAtOnce = 1 + w.GetSegmentInfosSize_Nunit();
    var tiered = mp as TieredMergePolicy;
    var logMp = mp as LogMergePolicy;
    if (tiered != null)
    {
        tiered.MaxMergeAtOnce = mergeAtOnce;
    }
    else if (logMp != null)
    {
        logMp.MergeFactor = mergeAtOnce;
    }
    else
    {
        // skip test
        w.Dispose();
        d.Dispose();
        return;
    }

    AtomicBoolean doStop = new AtomicBoolean();
    w.Config.SetMaxBufferedDocs(2);
    ThreadClass t = new ThreadAnonymousInnerClassHelper(this, w, numStartDocs, docs, doStop);
    t.Start();
    w.ForceMerge(1);
    doStop.Set(true);
    t.Join();

    Assert.IsTrue(w.MergeCount.Get() <= 1, "merge count is " + w.MergeCount.Get());

    w.Dispose();
    d.Dispose();
    docs.Dispose();
}
/// <summary>
/// Captures the state shared with the committing worker thread: the owning
/// test, the directory/writer under test, the shared failure flag, the
/// wall-clock deadline, and this thread's index.
/// </summary>
public ThreadAnonymousInnerClassHelper(TestIndexWriterCommit outerInstance, Directory dir, RandomIndexWriter w, AtomicBoolean failed, long endTime, int finalI)
{
    this.OuterInstance = outerInstance;
    this.FinalI = finalI;
    this.EndTime = endTime;
    this.Failed = failed;
    this.w = w;
    this.Dir = dir;
}
/// <summary>
/// Stress-tests IndexWriter.Commit from several threads for a fixed
/// wall-clock budget and asserts that no thread reported a failure.
/// </summary>
public virtual void TestCommitThreadSafety()
{
    const int NUM_THREADS = 5;
    const double RUN_SEC = 0.5;

    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(
        Random(),
        dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
    TestUtil.ReduceOpenFiles(w.w);
    w.Commit();

    AtomicBoolean failed = new AtomicBoolean();
    ThreadClass[] threads = new ThreadClass[NUM_THREADS];
    // NOTE(review): DateTime.Now.Millisecond is the millisecond COMPONENT
    // (0-999) of the current time, not epoch milliseconds — this looks like a
    // porting bug (Java's System.currentTimeMillis()). The worker threads'
    // Run() presumably compares against the same clock, so both sides must be
    // changed together — TODO confirm against ThreadAnonymousInnerClassHelper.
    long endTime = DateTime.Now.Millisecond + ((long)(RUN_SEC * 1000));
    for (int i = 0; i < NUM_THREADS; i++)
    {
        int finalI = i;
        threads[i] = new ThreadAnonymousInnerClassHelper(this, dir, w, failed, endTime, finalI);
        threads[i].Start();
    }
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }

    Assert.IsFalse(failed.Get());
    w.Dispose();
    dir.Dispose();
}
/// <summary>
/// Exercises DoubleRangeFacetCounts over a custom ValueSource: checks facet
/// counts, the optional fast-match filter, drill-down, and drill-sideways.
/// </summary>
public virtual void TestCustomDoublesValueSource()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);

    Document doc = new Document();
    writer.AddDocument(doc);
    writer.AddDocument(doc);
    writer.AddDocument(doc);

    // Test wants 3 docs in one segment:
    writer.ForceMerge(1);

    // Custom value source supplies the per-doc double values being faceted.
    var vs = new ValueSourceAnonymousInnerClassHelper(this, doc);

    FacetsConfig config = new FacetsConfig();

    FacetsCollector fc = new FacetsCollector();

    IndexReader r = writer.Reader;
    IndexSearcher s = NewSearcher(r);
    s.Search(new MatchAllDocsQuery(), fc);

    // Overlapping ranges: each wider range should count at least as many docs.
    DoubleRange[] ranges = new DoubleRange[] { new DoubleRange("< 1", 0.0, true, 1.0, false), new DoubleRange("< 2", 0.0, true, 2.0, false), new DoubleRange("< 5", 0.0, true, 5.0, false), new DoubleRange("< 10", 0.0, true, 10.0, false), new DoubleRange("< 20", 0.0, true, 20.0, false), new DoubleRange("< 50", 0.0, true, 50.0, false) };

    Filter fastMatchFilter;
    AtomicBoolean filterWasUsed = new AtomicBoolean();
    if (Random().NextBoolean())
    {
        // Sort of silly:
        // (a match-all fast-match filter; the wrapper records that it was consulted)
        fastMatchFilter = new CachingWrapperFilterAnonymousInnerClassHelper(this, new QueryWrapperFilter(new MatchAllDocsQuery()), filterWasUsed);
    }
    else
    {
        fastMatchFilter = null;
    }

    if (VERBOSE)
    {
        Console.WriteLine("TEST: fastMatchFilter=" + fastMatchFilter);
    }

    Facets facets = new DoubleRangeFacetCounts("field", vs, fc, fastMatchFilter, ranges);

    Assert.AreEqual("dim=field path=[] value=3 childCount=6\n  < 1 (0)\n  < 2 (1)\n  < 5 (3)\n  < 10 (3)\n  < 20 (3)\n  < 50 (3)\n", facets.GetTopChildren(10, "field").ToString());
    // If a fast-match filter was installed, it must have been consulted.
    Assert.True(fastMatchFilter == null || filterWasUsed.Get());

    DrillDownQuery ddq = new DrillDownQuery(config);
    ddq.Add("field", ranges[1].GetFilter(fastMatchFilter, vs));

    // Test simple drill-down:
    Assert.AreEqual(1, s.Search(ddq, 10).TotalHits);

    // Test drill-sideways after drill-down
    DrillSideways ds = new DrillSidewaysAnonymousInnerClassHelper2(this, s, config, (TaxonomyReader)null, vs, ranges, fastMatchFilter);

    DrillSidewaysResult dsr = ds.Search(ddq, 10);
    Assert.AreEqual(1, dsr.Hits.TotalHits);
    Assert.AreEqual("dim=field path=[] value=3 childCount=6\n  < 1 (0)\n  < 2 (1)\n  < 5 (3)\n  < 10 (3)\n  < 20 (3)\n  < 50 (3)\n", dsr.Facets.GetTopChildren(10, "field").ToString());

    IOUtils.Close(r, writer, dir);
}
/// <summary>
/// Wraps the given filter and captures the flag used to record that the
/// fast-match filter was actually consulted during faceting.
/// </summary>
public CachingWrapperFilterAnonymousInnerClassHelper(TestRangeFacetCounts outerInstance, QueryWrapperFilter org, AtomicBoolean filterWasUsed)
    : base(org)
{
    this.filterWasUsed = filterWasUsed;
    this.outerInstance = outerInstance;
}
/// <summary>
/// Captures the directory under test and the toggle that controls when this
/// failure injector should start throwing.
/// </summary>
public FailureAnonymousInnerClassHelper(TestIndexWriterReader outerInstance, MockDirectoryWrapper dir, AtomicBoolean shouldFail)
{
    this.ShouldFail = shouldFail;
    this.Dir = dir;
    this.OuterInstance = outerInstance;
}
/// <summary>
/// LUCENE-5262: test that several failed attempts to obtain an NRT reader
/// don't leak file handles.
/// </summary>
public virtual void TestNRTOpenExceptions()
{
    MockDirectoryWrapper dir = (MockDirectoryWrapper)GetAssertNoDeletesDirectory(NewMockDirectory());
    AtomicBoolean shouldFail = new AtomicBoolean();
    dir.FailOn(new FailureAnonymousInnerClassHelper(this, dir, shouldFail));

    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges from getting in the way
    IndexWriter writer = new IndexWriter(dir, conf);

    // create a segment and open an NRT reader
    writer.AddDocument(new Document());
    writer.Reader.Dispose();

    // add a new document so a new NRT reader is required
    writer.AddDocument(new Document());

    // try to obtain an NRT reader twice: first time it fails and closes all the
    // other NRT readers. second time it fails, but also fails to close the
    // other NRT reader, since it is already marked closed!
    for (int i = 0; i < 2; i++)
    {
        shouldFail.Set(true);
        try
        {
            writer.Reader.Dispose();
        }
        catch (FakeIOException) // fix: exception variable was declared but never used (CS0168)
        {
            // expected
            if (VERBOSE)
            {
                Console.WriteLine("hit expected fake IOE");
            }
        }
    }

    writer.Dispose();
    dir.Dispose();
}
/// <summary>
/// Checks that a configured merged-segment warmer is actually invoked once
/// background merges run. (The "Wramer" typo in the method name is preserved
/// from the original so test discovery and filters keep matching it.)
/// </summary>
public virtual void TestSimpleMergedSegmentWramer()
{
    Directory dir = NewDirectory();
    AtomicBoolean didWarm = new AtomicBoolean();
    InfoStream infoStream = new InfoStreamAnonymousInnerClassHelper(this, didWarm);
    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
        .SetMaxBufferedDocs(2)
        .SetReaderPooling(true)
        .SetInfoStream(infoStream)
        .SetMergedSegmentWarmer(new SimpleMergedSegmentWarmer(infoStream))
        .SetMergePolicy(NewLogMergePolicy(10));
    IndexWriter w = new IndexWriter(dir, config);

    Document doc = new Document();
    doc.Add(NewStringField("foo", "bar", Field.Store.NO));
    // Small doc buffer plus merge factor 10 guarantees merges within 20 adds.
    for (int added = 0; added < 20; added++)
    {
        w.AddDocument(doc);
    }
    w.WaitForMerges();
    w.Dispose();
    dir.Dispose();

    Assert.IsTrue(didWarm.Get());
}
/// <summary>
/// Builds a small two-term index, then runs concurrent searcher threads
/// against it and (when VERBOSE) reports build time and total search count.
/// </summary>
public virtual void Test()
{
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir);

    // BUGFIX: the original measured time with DateTime.Now.Millisecond, which
    // is the millisecond COMPONENT (0-999) of the wall clock, not elapsed
    // time — the "BUILD took" value was meaningless (and could be negative).
    // Use a monotonic Stopwatch instead; both endpoints live in this method.
    var buildTimer = System.Diagnostics.Stopwatch.StartNew();

    // TODO: replace w/ the @nightly test data; make this
    // into an optional @nightly stress test
    Document doc = new Document();
    Field body = NewTextField("body", "", Field.Store.NO);
    doc.Add(body);
    StringBuilder sb = new StringBuilder();
    for (int docCount = 0; docCount < NUM_DOCS; docCount++)
    {
        int numTerms = Random().Next(10);
        for (int termCount = 0; termCount < numTerms; termCount++)
        {
            sb.Append(Random().NextBoolean() ? "aaa" : "bbb");
            sb.Append(' ');
        }
        body.StringValue = sb.ToString();
        w.AddDocument(doc);
        sb.Remove(0, sb.Length);
    }
    IndexReader r = w.Reader;
    w.Dispose();

    buildTimer.Stop();
    if (VERBOSE)
    {
        Console.WriteLine("BUILD took " + buildTimer.ElapsedMilliseconds);
    }

    IndexSearcher s = NewSearcher(r);

    AtomicBoolean failed = new AtomicBoolean();
    AtomicLong netSearch = new AtomicLong();

    ThreadClass[] threads = new ThreadClass[NUM_SEARCH_THREADS];
    for (int threadID = 0; threadID < NUM_SEARCH_THREADS; threadID++)
    {
        threads[threadID] = new ThreadAnonymousInnerClassHelper(this, s, failed, netSearch);
        threads[threadID].SetDaemon(true);
    }

    foreach (ThreadClass t in threads)
    {
        t.Start();
    }

    foreach (ThreadClass t in threads)
    {
        t.Join();
    }

    if (VERBOSE)
    {
        Console.WriteLine(NUM_SEARCH_THREADS + " threads did " + netSearch.Get() + " searches");
    }

    r.Dispose();
    dir.Dispose();
}
/// <summary>
/// Captures the shared searcher, failure flag, and search counter, and
/// creates this thread's private hit-count collector.
/// </summary>
public ThreadAnonymousInnerClassHelper(TestSearchWithThreads outerInstance, IndexSearcher s, AtomicBoolean failed, AtomicLong netSearch)
{
    this.NetSearch = netSearch;
    this.Failed = failed;
    this.s = s;
    this.OuterInstance = outerInstance;
    col = new TotalHitCountCollector();
}
/// <summary>
/// Captures the writer under test, the document source, the number of seed
/// documents, and the stop flag consulted by the worker loop.
/// </summary>
public ThreadAnonymousInnerClassHelper(TestForceMergeForever outerInstance, Lucene.Net.Index.TestForceMergeForever.MyIndexWriter w, int numStartDocs, LineFileDocs docs, AtomicBoolean doStop)
{
    this.DoStop = doStop;
    this.Docs = docs;
    this.NumStartDocs = numStartDocs;
    this.w = w;
    this.OuterInstance = outerInstance;
}
/// <summary>
/// Captures the owning test case and the flag that says whether merging is
/// currently expected/allowed.
/// </summary>
public SerialMergeSchedulerAnonymousInnerClassHelper(BaseMergePolicyTestCase outerInstance, AtomicBoolean mayMerge)
{
    this.MayMerge = mayMerge;
    this.OuterInstance = outerInstance;
}
/// <summary>
/// Three threads each try to add a differently-typed DocValues field under
/// the same field name; at least one must hit the type-mismatch exception.
/// </summary>
public virtual void TestMixedTypesDifferentThreads()
{
    Directory dir = NewDirectory();
    IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

    CountDownLatch startingGun = new CountDownLatch(1);
    AtomicBoolean hitExc = new AtomicBoolean();
    ThreadClass[] threads = new ThreadClass[3];
    for (int i = 0; i < 3; i++)
    {
        // Each thread gets a doc whose "foo" field uses a different DV type.
        Field field;
        switch (i)
        {
            case 0:
                field = new SortedDocValuesField("foo", new BytesRef("hello"));
                break;
            case 1:
                field = new NumericDocValuesField("foo", 0);
                break;
            default:
                field = new BinaryDocValuesField("foo", new BytesRef("bazz"));
                break;
        }
        Document doc = new Document();
        doc.Add(field);
        threads[i] = new ThreadAnonymousInnerClassHelper(this, w, startingGun, hitExc, doc);
        threads[i].Start();
    }

    // Release all writer threads at once.
    startingGun.countDown();

    foreach (ThreadClass t in threads)
    {
        t.Join();
    }

    Assert.IsTrue(hitExc.Get());
    w.Dispose();
    dir.Dispose();
}
/// <summary>
/// Captures the flag used to record that segment warming was observed in the
/// info stream.
/// </summary>
public InfoStreamAnonymousInnerClassHelper(TestIndexWriterReader outerInstance, AtomicBoolean didWarm)
{
    this.DidWarm = didWarm;
    this.OuterInstance = outerInstance;
}
/// <summary>
/// Captures the shared writer, the start latch, the exception flag, and the
/// document this thread will attempt to index.
/// </summary>
public ThreadAnonymousInnerClassHelper(TestDocValuesIndexing outerInstance, IndexWriter w, CountDownLatch startingGun, AtomicBoolean hitExc, Document doc)
{
    this.Doc = doc;
    this.HitExc = hitExc;
    this.StartingGun = startingGun;
    this.w = w;
    this.OuterInstance = outerInstance;
}
/// <summary>
/// Captures the state shared with the committing worker thread.
/// </summary>
/// <param name="newStringField">
/// LUCENENET specific
/// This is passed in because <see cref="LuceneTestCase.NewStringField(string, string, Field.Store)"/>
/// is no longer static.
/// </param>
public ThreadAnonymousInnerClassHelper(Directory dir, RandomIndexWriter w, AtomicBoolean failed, long endTime, int finalI, Func<string, string, Field.Store, Field> newStringField)
{
    this.FinalI = finalI;
    this.EndTime = endTime;
    this.Failed = failed;
    this.w = w;
    this.Dir = dir;
    NewStringField = newStringField;
}