public virtual void Test()
        {
            // NOTE: if we see a failure on this test with "NestedPulsing", it's because its
            // reuse isn't perfect (but reasonable). See TestPulsingReuse.testNestedPulsing
            // for more details.
            MockDirectoryWrapper dir = NewMockDirectory();
            TieredMergePolicy tmp = new TieredMergePolicy();
            tmp.MaxMergeAtOnce = 2;
            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(2).SetMergePolicy(tmp));
            const int numDocs = 20;
            for (int docs = 0; docs < numDocs; docs++)
            {
                StringBuilder sb = new StringBuilder();
                for (int terms = 0; terms < 100; terms++)
                {
                    sb.Append(TestUtil.RandomRealisticUnicodeString(Random()));
                    sb.Append(' ');
                }
                Document doc = new Document();
                doc.Add(new TextField("field", sb.ToString(), Field.Store.NO));
                w.AddDocument(doc);
            }
            IndexReader r = w.Reader;
            w.Dispose();

            int cloneCount = dir.InputCloneCount;
            //System.out.println("merge clone count=" + cloneCount);
            Assert.IsTrue(cloneCount < 500, "too many calls to IndexInput.clone during merging: " + dir.InputCloneCount);

            IndexSearcher s = NewSearcher(r);

            // MTQ that matches all terms, so AUTO_REWRITE should
            // cut over to filter rewrite and reuse a single DocsEnum
            // across all terms.
            TopDocs hits = s.Search(new TermRangeQuery("field", new BytesRef(), new BytesRef("\uFFFF"), true, true), 10);
            Assert.IsTrue(hits.TotalHits > 0);
            int queryCloneCount = dir.InputCloneCount - cloneCount;
            //System.out.println("query clone count=" + queryCloneCount);
            Assert.IsTrue(queryCloneCount < 50, "too many calls to IndexInput.clone during TermRangeQuery: " + queryCloneCount);
            r.Dispose();
            dir.Dispose();
        }
Example #2
        internal static MergePolicy NewSortingMergePolicy(Sort sort)
        {
            // create a MP with a low merge factor so that many merges happen
            MergePolicy mp;

            if (Random.NextBoolean())
            {
                TieredMergePolicy tmp = NewTieredMergePolicy(Random);
                int numSegs           = TestUtil.NextInt32(Random, 3, 5);
                tmp.SegmentsPerTier = numSegs;
                tmp.MaxMergeAtOnce  = TestUtil.NextInt32(Random, 2, numSegs);
                mp = tmp;
            }
            else
            {
                LogMergePolicy lmp = NewLogMergePolicy(Random);
                lmp.MergeFactor = TestUtil.NextInt32(Random, 3, 5);
                mp = lmp;
            }
            // wrap it with a sorting mp
            return new SortingMergePolicy(mp, sort);
        }
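A hedged usage sketch for this helper; the "timestamp" field, the SortFieldType enum name (older Lucene.NET versions spell it SortField.Type_e), and the config wiring are illustrative assumptions, not part of the example above:

            // Minimal sketch: segments produced by future merges come out sorted
            // by a hypothetical "timestamp" field; existing segments are untouched.
            Sort sort = new Sort(new SortField("timestamp", SortFieldType.INT64));
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
            iwc.SetMergePolicy(NewSortingMergePolicy(sort));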
        public virtual void TestNrt()
        {
            Store.Directory   dir     = NewDirectory();
            Store.Directory   taxoDir = NewDirectory();
            IndexWriterConfig iwc     = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            // Don't allow tiny maxBufferedDocs; it can make this
            // test too slow:
            iwc.SetMaxBufferedDocs(Math.Max(500, iwc.MaxBufferedDocs));

            // MockRandom/AlcoholicMergePolicy are too slow:
            TieredMergePolicy tmp = new TieredMergePolicy();

            tmp.FloorSegmentMB = .001;
            iwc.SetMergePolicy(tmp);
            IndexWriter  w      = new IndexWriter(dir, iwc);
            var          tw     = new DirectoryTaxonomyWriter(taxoDir);
            FacetsConfig config = new FacetsConfig();

            config.SetMultiValued("field", true);
            AtomicBoolean stop = new AtomicBoolean();

            // How many unique facets to index before stopping:
            int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

            var indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);

            var mgr = new SearcherTaxonomyManager(w, true, null, tw);

            var reopener = new ThreadAnonymousInnerClassHelper(this, stop, mgr);

            reopener.Name = "reopener";
            reopener.Start();

            indexer.Name = "indexer";
            indexer.Start();

            try
            {
                while (!stop.Get())
                {
                    SearcherAndTaxonomy pair = mgr.Acquire();
                    try
                    {
                        //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                        FacetsCollector sfc = new FacetsCollector();
                        pair.Searcher.Search(new MatchAllDocsQuery(), sfc);
                        Facets      facets = GetTaxonomyFacetCounts(pair.TaxonomyReader, config, sfc);
                        FacetResult result = facets.GetTopChildren(10, "field");
                        if (pair.Searcher.IndexReader.NumDocs > 0)
                        {
                            //System.out.println(pair.taxonomyReader.getSize());
                            Assert.True(result.ChildCount > 0);
                            Assert.True(result.LabelValues.Length > 0);
                        }

                        //if (VERBOSE) {
                        //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                        //}
                    }
                    finally
                    {
                        mgr.Release(pair);
                    }
                }
            }
            finally
            {
                indexer.Join();
                reopener.Join();
            }

            if (VERBOSE)
            {
                Console.WriteLine("TEST: now stop");
            }

            IOUtils.Close(mgr, tw, w, taxoDir, dir);
        }
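The ThreadAnonymousInnerClassHelper used as the reopener is not shown on this page. A plausible sketch, modeled on the upstream Lucene test: it refreshes the SearcherTaxonomyManager in a loop until stop is set. The base class (ThreadJob in newer Lucene.NET, ThreadClass in older ports), the omitted outer-instance constructor parameter, and the fixed sleep interval are assumptions:

        internal class ThreadAnonymousInnerClassHelper : ThreadJob
        {
            private readonly AtomicBoolean stop;
            private readonly SearcherTaxonomyManager mgr;

            public ThreadAnonymousInnerClassHelper(AtomicBoolean stop, SearcherTaxonomyManager mgr)
            {
                this.stop = stop;
                this.mgr = mgr;
            }

            public override void Run()
            {
                while (!stop.Get())
                {
                    // Give the indexer a moment to add docs, then refresh so
                    // acquired SearcherAndTaxonomy pairs see the new segments:
                    System.Threading.Thread.Sleep(10);
                    mgr.MaybeRefresh();
                }
            }
        }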
        public virtual void TestMaxMergeCount()
        {
            Directory dir = NewDirectory();
            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            int maxMergeCount = TestUtil.NextInt(Random(), 1, 5);
            int maxMergeThreads = TestUtil.NextInt(Random(), 1, maxMergeCount);
            CountdownEvent enoughMergesWaiting = new CountdownEvent(maxMergeCount);
            AtomicInteger runningMergeCount = new AtomicInteger(0);
            AtomicBoolean failed = new AtomicBoolean();

            if (VERBOSE)
            {
                Console.WriteLine("TEST: maxMergeCount=" + maxMergeCount + " maxMergeThreads=" + maxMergeThreads);
            }

            ConcurrentMergeScheduler cms = new ConcurrentMergeSchedulerAnonymousInnerClassHelper(this, maxMergeCount, enoughMergesWaiting, runningMergeCount, failed);
            cms.SetMaxMergesAndThreads(maxMergeCount, maxMergeThreads);
            iwc.SetMergeScheduler(cms);
            iwc.SetMaxBufferedDocs(2);

            TieredMergePolicy tmp = new TieredMergePolicy();
            iwc.SetMergePolicy(tmp);
            tmp.MaxMergeAtOnce = 2;
            tmp.SegmentsPerTier = 2;

            IndexWriter w = new IndexWriter(dir, iwc);
            Document doc = new Document();
            doc.Add(NewField("field", "field", TextField.TYPE_NOT_STORED));
            while (enoughMergesWaiting.CurrentCount != 0 && !failed.Get())
            {
                for (int i = 0; i < 10; i++)
                {
                    w.AddDocument(doc);
                }
            }
            w.Dispose(false);
            dir.Dispose();
        }
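The ConcurrentMergeSchedulerAnonymousInnerClassHelper referenced above is also not shown. A sketch modeled on the upstream Lucene test: each merge registers itself, asserts the running count never exceeds maxMergeCount, and stalls until maxMergeCount merges are waiting. The constructor wiring (the outer-instance parameter is dropped) and the IsSet guard around Signal are assumptions:

        internal class ConcurrentMergeSchedulerAnonymousInnerClassHelper : ConcurrentMergeScheduler
        {
            private readonly int maxMergeCount;
            private readonly CountdownEvent enoughMergesWaiting;
            private readonly AtomicInteger runningMergeCount;
            private readonly AtomicBoolean failed;

            public ConcurrentMergeSchedulerAnonymousInnerClassHelper(
                int maxMergeCount, CountdownEvent enoughMergesWaiting,
                AtomicInteger runningMergeCount, AtomicBoolean failed)
            {
                this.maxMergeCount = maxMergeCount;
                this.enoughMergesWaiting = enoughMergesWaiting;
                this.runningMergeCount = runningMergeCount;
                this.failed = failed;
            }

            protected override void DoMerge(MergePolicy.OneMerge merge)
            {
                try
                {
                    // Stall every incoming merge until maxMergeCount of them are waiting:
                    int count = runningMergeCount.IncrementAndGet();
                    try
                    {
                        Assert.IsTrue(count <= maxMergeCount, "count=" + count + " vs maxMergeCount=" + maxMergeCount);
                        if (!enoughMergesWaiting.IsSet)
                        {
                            enoughMergesWaiting.Signal();
                        }
                        // Block until all expected merges have arrived (or the test failed):
                        while (!enoughMergesWaiting.Wait(10) && !failed.Get())
                        {
                        }
                        base.DoMerge(merge);
                    }
                    finally
                    {
                        runningMergeCount.DecrementAndGet();
                    }
                }
                catch (Exception t)
                {
                    failed.Set(true);
                    throw new Exception(t.ToString(), t);
                }
            }
        }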
Example #5
 // Nested comparer TieredMergePolicy uses to sort candidate segments by byte size, descending:
 private readonly TieredMergePolicy outerInstance;

 public SegmentByteSizeDescending(TieredMergePolicy outerInstance)
 {
     this.outerInstance = outerInstance;
 }
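For context, a sketch of the Compare method this constructor belongs to, following the upstream Lucene implementation: segments are ordered largest-first by delete-prorated byte size, with the segment name as a tie-break (the original's exception handling is omitted here):

 public int Compare(SegmentCommitInfo o1, SegmentCommitInfo o2)
 {
     // Size() prorates the on-disk byte size by the segment's deleted-doc count:
     long sz1 = outerInstance.Size(o1);
     long sz2 = outerInstance.Size(o2);
     if (sz1 > sz2)
     {
         return -1; // larger segments sort first
     }
     else if (sz2 > sz1)
     {
         return 1;
     }
     else
     {
         // Tie-break by name so the ordering is stable:
         return string.CompareOrdinal(o1.Info.Name, o2.Info.Name);
     }
 }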
        public virtual void TestForceMergeDeletesMaxSegSize()
        {
            Directory dir = NewDirectory();
            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            TieredMergePolicy tmp = new TieredMergePolicy();
            tmp.MaxMergedSegmentMB = 0.01;
            tmp.ForceMergeDeletesPctAllowed = 0.0;
            conf.SetMergePolicy(tmp);

            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, conf);
            w.RandomForceMerge = false;

            int numDocs = AtLeast(200);
            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();
                doc.Add(NewStringField("id", "" + i, Field.Store.NO));
                doc.Add(NewTextField("content", "aaa " + i, Field.Store.NO));
                w.AddDocument(doc);
            }

            w.ForceMerge(1);
            IndexReader r = w.Reader;
            Assert.AreEqual(numDocs, r.MaxDoc);
            Assert.AreEqual(numDocs, r.NumDocs);
            r.Dispose();

            if (VERBOSE)
            {
                Console.WriteLine("\nTEST: delete doc");
            }

            w.DeleteDocuments(new Term("id", "" + (42 + 17)));

            r = w.Reader;
            Assert.AreEqual(numDocs, r.MaxDoc);
            Assert.AreEqual(numDocs - 1, r.NumDocs);
            r.Dispose();

            w.ForceMergeDeletes();

            r = w.Reader;
            Assert.AreEqual(numDocs - 1, r.MaxDoc);
            Assert.AreEqual(numDocs - 1, r.NumDocs);
            r.Dispose();

            w.Dispose();

            dir.Dispose();
        }
        public virtual void TestSetters()
        {
            TieredMergePolicy tmp = new TieredMergePolicy();

            tmp.MaxMergedSegmentMB = 0.5;
            Assert.AreEqual(0.5, tmp.MaxMergedSegmentMB, EPSILON);

            tmp.MaxMergedSegmentMB = double.PositiveInfinity;
            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxMergedSegmentMB, EPSILON * long.MaxValue);

            tmp.MaxMergedSegmentMB = long.MaxValue / 1024 / 1024.0;
            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxMergedSegmentMB, EPSILON * long.MaxValue);

            try
            {
                tmp.MaxMergedSegmentMB = -2.0;
                Assert.Fail("Didn't throw IllegalArgumentException");
            }
            catch (System.ArgumentException iae)
            {
                // pass
            }

            tmp.FloorSegmentMB = 2.0;
            Assert.AreEqual(2.0, tmp.FloorSegmentMB, EPSILON);

            tmp.FloorSegmentMB = double.PositiveInfinity;
            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.FloorSegmentMB, EPSILON * long.MaxValue);

            tmp.FloorSegmentMB = long.MaxValue / 1024 / 1024.0;
            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.FloorSegmentMB, EPSILON * long.MaxValue);

            try
            {
                tmp.FloorSegmentMB = -2.0;
                Assert.Fail("Didn't throw IllegalArgumentException");
            }
            catch (System.ArgumentException iae)
            {
                // pass
            }

            tmp.MaxCFSSegmentSizeMB = 2.0;
            Assert.AreEqual(2.0, tmp.MaxCFSSegmentSizeMB, EPSILON);

            tmp.MaxCFSSegmentSizeMB = double.PositiveInfinity;
            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxCFSSegmentSizeMB, EPSILON * long.MaxValue);

            tmp.MaxCFSSegmentSizeMB = long.MaxValue / 1024 / 1024.0;
            Assert.AreEqual(long.MaxValue / 1024 / 1024.0, tmp.MaxCFSSegmentSizeMB, EPSILON * long.MaxValue);

            try
            {
                tmp.MaxCFSSegmentSizeMB = -2.0;
                Assert.Fail("Didn't throw IllegalArgumentException");
            }
            catch (System.ArgumentException iae)
            {
                // pass
            }

            // TODO: Add more checks for other non-double setters!
        }
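The EPSILON tolerance used throughout these assertions is defined on the test class but not shown on this page; the upstream Lucene test defines it as:

            private const double EPSILON = 1E-14;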
Example #8
 public static TieredMergePolicy NewTieredMergePolicy(Random r)
 {
     TieredMergePolicy tmp = new TieredMergePolicy();
     if (Rarely(r))
     {
         tmp.MaxMergeAtOnce = TestUtil.NextInt(r, 2, 9);
         tmp.MaxMergeAtOnceExplicit = TestUtil.NextInt(r, 2, 9);
     }
     else
     {
         tmp.MaxMergeAtOnce = TestUtil.NextInt(r, 10, 50);
         tmp.MaxMergeAtOnceExplicit = TestUtil.NextInt(r, 10, 50);
     }
     if (Rarely(r))
     {
         tmp.MaxMergedSegmentMB = 0.2 + r.NextDouble() * 2.0;
     }
     else
     {
         tmp.MaxMergedSegmentMB = r.NextDouble() * 100;
     }
     tmp.FloorSegmentMB = 0.2 + r.NextDouble() * 2.0;
     tmp.ForceMergeDeletesPctAllowed = 0.0 + r.NextDouble() * 30.0;
     if (Rarely(r))
     {
         tmp.SegmentsPerTier = TestUtil.NextInt(r, 2, 20);
     }
     else
     {
         tmp.SegmentsPerTier = TestUtil.NextInt(r, 10, 50);
     }
     ConfigureRandom(r, tmp);
     tmp.ReclaimDeletesWeight = r.NextDouble() * 4;
     return tmp;
 }
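A minimal usage sketch for this factory, assuming the same test-framework helpers (NewDirectory, NewIndexWriterConfig, Random()) seen in the other examples on this page:

 // Plug the randomized policy into a writer config:
 Directory dir = NewDirectory();
 IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
 iwc.SetMergePolicy(NewTieredMergePolicy(Random()));
 using (IndexWriter w = new IndexWriter(dir, iwc))
 {
     // ... index documents; merge cadence now follows the randomized settings ...
 }
 dir.Dispose();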