Example #1
        protected override void DoSearching(TaskScheduler es, DateTime stopTime)
        {
            ThreadClass reopenThread = new ThreadAnonymousInnerClassHelper(this, stopTime);

            reopenThread.SetDaemon(true);
            reopenThread.Start();

            RunSearchThreads(stopTime);

            reopenThread.Join();
        }
Example #2
        protected override void DoSearching(TaskScheduler es, long stopTime)
        {
            ThreadJob reopenThread = new ThreadAnonymousInnerClassHelper(this, stopTime);

            reopenThread.IsBackground = true;
            reopenThread.Start();

            RunSearchThreads(stopTime);

            reopenThread.Join();
        }
Example #3
        private void RunTest(Random random, Directory dir)
        {
            // Run for ~1 second
            long stopTime = Environment.TickCount + 1000;

            SnapshotDeletionPolicy dp     = DeletionPolicy;
            IndexWriter            writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetIndexDeletionPolicy(dp).SetMaxBufferedDocs(2));

            // Verify we catch misuse:
            try
            {
                dp.Snapshot();
                Assert.Fail("did not hit exception");
            }
#pragma warning disable 168
            catch (InvalidOperationException ise)
#pragma warning restore 168
            {
                // expected
            }
            dp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy;
            writer.Commit();

            ThreadClass t = new ThreadAnonymousInnerClassHelper(stopTime, writer, NewField);

            t.Start();

            // While the above indexing thread is running, take many
            // backups:
            do
            {
                BackupIndex(dir, dp);
                Thread.Sleep(20);
            } while (t.IsAlive);

            t.Join();

            // Add one more document to force writer to commit a
            // final segment, so deletion policy has a chance to
            // delete again:
            Document  doc        = new Document();
            FieldType customType = new FieldType(TextField.TYPE_STORED);
            customType.StoreTermVectors         = true;
            customType.StoreTermVectorPositions = true;
            customType.StoreTermVectorOffsets   = true;
            doc.Add(NewField("content", "aaa", customType));
            writer.AddDocument(doc);

            // Make sure we don't have any leftover files in the
            // directory:
            writer.Dispose();
            TestIndexWriter.AssertNoUnreferencedFiles(dir, "some files were not deleted but should have been");
        }
Example #4
        public virtual void Test()
        {
            Directory         dir = NewFSDirectory(CreateTempDir("livefieldupdates"));
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            IndexWriter w = new IndexWriter(dir, iwc);

            SearcherManager mgr = new SearcherManager(w, true, new SearcherFactoryAnonymousInnerClassHelper());

            const int missing = -1;

            LiveFieldValues <IndexSearcher, int?> rt = new LiveFieldValuesAnonymousInnerClassHelper(mgr, missing);

            int numThreads = TestUtil.NextInt(Random(), 2, 5);

            if (VERBOSE)
            {
                Console.WriteLine(numThreads + " threads");
            }

            CountdownEvent      startingGun = new CountdownEvent(1);
            IList <ThreadClass> threads     = new List <ThreadClass>();

            int iters   = AtLeast(1000);
            int idCount = TestUtil.NextInt(Random(), 100, 10000);

            double reopenChance = Random().NextDouble() * 0.01;
            double deleteChance = Random().NextDouble() * 0.25;
            double addChance    = Random().NextDouble() * 0.5;

            for (int t = 0; t < numThreads; t++)
            {
                int         threadID     = t;
                Random      threadRandom = new Random(Random().Next());
                ThreadClass thread       = new ThreadAnonymousInnerClassHelper(w, mgr, missing, rt, startingGun, iters, idCount, reopenChance, deleteChance, addChance, t, threadID, threadRandom);
                threads.Add(thread);
                thread.Start();
            }

            startingGun.Signal();

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }
            mgr.MaybeRefresh();
            Assert.AreEqual(0, rt.Count);

            rt.Dispose();
            mgr.Dispose();
            w.Dispose();
            dir.Dispose();
        }
Example #5
        public virtual void Test()
        {
            Directory    d        = NewDirectory();
            MockAnalyzer analyzer = new MockAnalyzer(Random);

            analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);

            MyIndexWriter w = new MyIndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

            // Try to make an index that requires merging:
            w.Config.SetMaxBufferedDocs(TestUtil.NextInt32(Random, 2, 11));
            int          numStartDocs = AtLeast(20);
            LineFileDocs docs         = new LineFileDocs(Random, DefaultCodecSupportsDocValues);

            for (int docIDX = 0; docIDX < numStartDocs; docIDX++)
            {
                w.AddDocument(docs.NextDoc());
            }
            MergePolicy mp          = w.Config.MergePolicy;
            int         mergeAtOnce = 1 + w.segmentInfos.Count;

            if (mp is TieredMergePolicy)
            {
                ((TieredMergePolicy)mp).MaxMergeAtOnce = mergeAtOnce;
            }
            else if (mp is LogMergePolicy)
            {
                ((LogMergePolicy)mp).MergeFactor = mergeAtOnce;
            }
            else
            {
                // skip test
                w.Dispose();
                d.Dispose();
                return;
            }

            AtomicBoolean doStop = new AtomicBoolean();

            w.Config.SetMaxBufferedDocs(2);
            ThreadJob t = new ThreadAnonymousInnerClassHelper(this, w, numStartDocs, docs, doStop);

            t.Start();
            w.ForceMerge(1);
            doStop.Value = true;
            t.Join();
            Assert.IsTrue(w.mergeCount <= 1, "merge count is " + w.mergeCount);
            w.Dispose();
            d.Dispose();
            docs.Dispose();
        }
Example #6
        public virtual void Test()
        {
            Directory d = NewDirectory();
            MockAnalyzer analyzer = new MockAnalyzer(Random());
            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);

            MyIndexWriter w = new MyIndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));

            // Try to make an index that requires merging:
            w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 2, 11));
            int numStartDocs = AtLeast(20);
            LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
            for (int docIDX = 0; docIDX < numStartDocs; docIDX++)
            {
                w.AddDocument(docs.NextDoc());
            }
            MergePolicy mp = w.Config.MergePolicy;
            int mergeAtOnce = 1 + w.GetSegmentInfosSize_Nunit();
            if (mp is TieredMergePolicy)
            {
                ((TieredMergePolicy)mp).MaxMergeAtOnce = mergeAtOnce;
            }
            else if (mp is LogMergePolicy)
            {
                ((LogMergePolicy)mp).MergeFactor = mergeAtOnce;
            }
            else
            {
                // skip test
                w.Dispose();
                d.Dispose();
                return;
            }

            AtomicBoolean doStop = new AtomicBoolean();
            w.Config.SetMaxBufferedDocs(2);
            ThreadClass t = new ThreadAnonymousInnerClassHelper(this, w, numStartDocs, docs, doStop);
            t.Start();
            w.ForceMerge(1);
            doStop.Set(true);
            t.Join();
            Assert.IsTrue(w.MergeCount.Get() <= 1, "merge count is " + w.MergeCount.Get());
            w.Dispose();
            d.Dispose();
            docs.Dispose();
        }
Example #7
        public virtual void TestPartiallyAppliedGlobalSlice()
        {
            DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();

            System.Reflection.FieldInfo field = typeof(DocumentsWriterDeleteQueue).GetField("globalBufferLock",
                                                                                            BindingFlags.NonPublic | BindingFlags.GetField | BindingFlags.Instance);
            ReentrantLock @lock = (ReentrantLock)field.GetValue(queue);

            @lock.Lock();
            var t = new ThreadAnonymousInnerClassHelper(this, queue);

            t.Start();
            t.Join();
            @lock.Unlock();
            Assert.IsTrue(queue.AnyChanges(), "changes in del queue but not in slice yet");
            queue.TryApplyGlobalSlice();
            Assert.IsTrue(queue.AnyChanges(), "changes in global buffer");
            FrozenBufferedUpdates freezeGlobalBuffer = queue.FreezeGlobalBuffer(null);

            Assert.IsTrue(freezeGlobalBuffer.Any());
            Assert.AreEqual(1, freezeGlobalBuffer.termCount);
            Assert.IsFalse(queue.AnyChanges(), "all changes applied");
        }
Example #8
        public virtual void TestHashCodeWithThreads()
        {
            AutomatonQuery[] queries = new AutomatonQuery[1000];
            for (int i = 0; i < queries.Length; i++)
            {
                queries[i] = new AutomatonQuery(new Term("bogus", "bogus"), AutomatonTestUtil.RandomAutomaton(Random()));
            }
            CountdownEvent startingGun = new CountdownEvent(1);
            int            numThreads  = TestUtil.NextInt(Random(), 2, 5);

            ThreadClass[] threads = new ThreadClass[numThreads];
            for (int threadID = 0; threadID < numThreads; threadID++)
            {
                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, queries, startingGun);
                threads[threadID] = thread;
                thread.Start();
            }
            startingGun.Signal();
            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }
        }
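For context, a self-contained sketch of the "starting gun" idiom used in the test above (plain System.Threading types rather than the test's ThreadClass helper; the worker body is only a placeholder): every worker blocks on a shared CountdownEvent, the main thread releases them all at once so the concurrent GetHashCode calls race, then joins each worker.

using System;
using System.Threading;

public static class StartingGunSketch
{
    public static void Main()
    {
        using var startingGun = new CountdownEvent(1);
        const int numThreads = 4;
        var threads = new Thread[numThreads];

        for (int threadID = 0; threadID < numThreads; threadID++)
        {
            int id = threadID;              // copy the loop variable for the closure
            threads[threadID] = new Thread(() =>
            {
                startingGun.Wait();         // every worker starts at the same moment
                Console.WriteLine($"worker {id}: hashing queries...");
                // the real test repeatedly calls query.GetHashCode() here
            });
            threads[threadID].Start();
        }

        startingGun.Signal();               // fire the starting gun
        foreach (Thread t in threads)
        {
            t.Join();
        }
    }
}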
Example #9
 public virtual void TestPartiallyAppliedGlobalSlice()
 {
     DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
     System.Reflection.FieldInfo field = typeof(DocumentsWriterDeleteQueue).GetField("GlobalBufferLock", 
         BindingFlags.NonPublic | BindingFlags.GetField | BindingFlags.Instance);
     ReentrantLock @lock = (ReentrantLock)field.GetValue(queue);
     @lock.Lock();
     ThreadClass t = new ThreadAnonymousInnerClassHelper(this, queue);
     t.Start();
     t.Join();
     @lock.Unlock();
     Assert.IsTrue(queue.AnyChanges(), "changes in del queue but not in slice yet");
     queue.TryApplyGlobalSlice();
     Assert.IsTrue(queue.AnyChanges(), "changes in global buffer");
     FrozenBufferedUpdates freezeGlobalBuffer = queue.FreezeGlobalBuffer(null);
     Assert.IsTrue(freezeGlobalBuffer.Any());
     Assert.AreEqual(1, freezeGlobalBuffer.TermCount);
     Assert.IsFalse(queue.AnyChanges(), "all changes applied");
 }
Example #10
        public virtual void TestNrt()
        {
            Store.Directory   dir     = NewDirectory();
            Store.Directory   taxoDir = NewDirectory();
            IndexWriterConfig iwc     = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            // Don't allow tiny maxBufferedDocs; it can make this
            // test too slow:
            iwc.SetMaxBufferedDocs(Math.Max(500, iwc.MaxBufferedDocs));

            // MockRandom/AlcoholicMergePolicy are too slow:
            TieredMergePolicy tmp = new TieredMergePolicy();

            tmp.FloorSegmentMB = .001;
            iwc.SetMergePolicy(tmp);
            IndexWriter  w      = new IndexWriter(dir, iwc);
            var          tw     = new DirectoryTaxonomyWriter(taxoDir);
            FacetsConfig config = new FacetsConfig();

            config.SetMultiValued("field", true);
            AtomicBoolean stop = new AtomicBoolean();

            // How many unique facets to index before stopping:
            int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

            var indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);

            var mgr = new SearcherTaxonomyManager(w, true, null, tw);

            var reopener = new ThreadAnonymousInnerClassHelper(this, stop, mgr);

            reopener.Name = "reopener";
            reopener.Start();

            indexer.Name = "indexer";
            indexer.Start();

            try
            {
                while (!stop.Get())
                {
                    SearcherAndTaxonomy pair = mgr.Acquire();
                    try
                    {
                        //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                        FacetsCollector sfc = new FacetsCollector();
                        pair.searcher.Search(new MatchAllDocsQuery(), sfc);
                        Facets      facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                        FacetResult result = facets.GetTopChildren(10, "field");
                        if (pair.searcher.IndexReader.NumDocs > 0)
                        {
                            //System.out.println(pair.taxonomyReader.getSize());
                            Assert.True(result.ChildCount > 0);
                            Assert.True(result.LabelValues.Length > 0);
                        }

                        //if (VERBOSE) {
                        //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                        //}
                    }
                    finally
                    {
                        mgr.Release(pair);
                    }
                }
            }
            finally
            {
                indexer.Join();
                reopener.Join();
            }

            if (VERBOSE)
            {
                Console.WriteLine("TEST: now stop");
            }

            IOUtils.Close(mgr, tw, w, taxoDir, dir);
        }
Example #11
        public virtual void TestNoWaitClose()
        {
            Directory directory = NewDirectory();

            Document doc = new Document();
            FieldType customType = new FieldType(TextField.TYPE_STORED);
            customType.Tokenized = false;

            Field idField = NewField("id", "", customType);
            doc.Add(idField);

            for (int pass = 0; pass < 2; pass++)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: pass="******"TEST: iter=" + iter);
                    }
                    for (int j = 0; j < 199; j++)
                    {
                        idField.StringValue = Convert.ToString(iter * 201 + j);
                        writer.AddDocument(doc);
                    }

                    int delID = iter * 199;
                    for (int j = 0; j < 20; j++)
                    {
                        writer.DeleteDocuments(new Term("id", Convert.ToString(delID)));
                        delID += 5;
                    }

                    // Force a bunch of merge threads to kick off so we
                    // stress out aborting them on close:
                    ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;

                    IndexWriter finalWriter = writer;
                    List<Exception> failure = new List<Exception>();
                    ThreadClass t1 = new ThreadAnonymousInnerClassHelper(this, doc, finalWriter, failure);

                    if (failure.Count > 0)
                    {
                        throw failure[0];
                    }

                    t1.Start();

                    writer.Dispose(false);
                    t1.Join();

                    // Make sure reader can read
                    IndexReader reader = DirectoryReader.Open(directory);
                    reader.Dispose();

                    // Reopen
                    writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND).SetMergePolicy(NewLogMergePolicy()));
                }
                writer.Dispose();
            }

            directory.Dispose();
        }
Example #12
 public virtual void TestHashCodeWithThreads()
 {
     AutomatonQuery[] queries = new AutomatonQuery[1000];
     for (int i = 0; i < queries.Length; i++)
     {
         queries[i] = new AutomatonQuery(new Term("bogus", "bogus"), AutomatonTestUtil.RandomAutomaton(Random()));
     }
     CountDownLatch startingGun = new CountDownLatch(1);
     int numThreads = TestUtil.NextInt(Random(), 2, 5);
     ThreadClass[] threads = new ThreadClass[numThreads];
     for (int threadID = 0; threadID < numThreads; threadID++)
     {
         ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, queries, startingGun);
         threads[threadID] = thread;
         thread.Start();
     }
     startingGun.countDown();
     foreach (ThreadClass thread in threads)
     {
         thread.Join();
     }
 }
Example #13
        public virtual void TestNoWaitClose()
        {
            Directory directory = NewDirectory();

            Document  doc        = new Document();
            FieldType customType = new FieldType(TextField.TYPE_STORED);

            customType.IsTokenized = false;

            Field idField = NewField("id", "", customType);

            doc.Add(idField);

            for (int pass = 0; pass < 2; pass++)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: pass="******"TEST: iter=" + iter);
                    }
                    for (int j = 0; j < 199; j++)
                    {
                        idField.SetStringValue(Convert.ToString(iter * 201 + j));
                        writer.AddDocument(doc);
                    }

                    int delID = iter * 199;
                    for (int j = 0; j < 20; j++)
                    {
                        writer.DeleteDocuments(new Term("id", Convert.ToString(delID)));
                        delID += 5;
                    }

                    // Force a bunch of merge threads to kick off so we
                    // stress out aborting them on close:
                    ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 2;

                    IndexWriter      finalWriter = writer;
                    List <Exception> failure     = new List <Exception>();
                    ThreadJob        t1          = new ThreadAnonymousInnerClassHelper(this, doc, finalWriter, failure);

                    if (failure.Count > 0)
                    {
                        throw failure[0];
                    }

                    t1.Start();

                    writer.Dispose(false);
                    t1.Join();

                    // Make sure reader can read
                    IndexReader reader = DirectoryReader.Open(directory);
                    reader.Dispose();

                    // Reopen
                    writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy()));
                }
                writer.Dispose();
            }

            directory.Dispose();
        }
Example #14
        public virtual void Test()
        {
            Directory    dir      = NewDirectory();
            MockAnalyzer analyzer = new MockAnalyzer(Random);

            analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);
            RandomIndexWriter w = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir, analyzer);
            LineFileDocs docs         = new LineFileDocs(Random, DefaultCodecSupportsDocValues);
            int          charsToIndex = AtLeast(100000);
            int          charsIndexed = 0;

            //System.out.println("bytesToIndex=" + charsToIndex);
            while (charsIndexed < charsToIndex)
            {
                Document doc = docs.NextDoc();
                charsIndexed += doc.Get("body").Length;
                w.AddDocument(doc);
                //System.out.println("  bytes=" + charsIndexed + " add: " + doc);
            }
            IndexReader r = w.GetReader();

            //System.out.println("numDocs=" + r.NumDocs);
            w.Dispose();

            IndexSearcher s         = NewSearcher(r);
            Terms         terms     = MultiFields.GetFields(r).GetTerms("body");
            int           termCount = 0;
            TermsEnum     termsEnum = terms.GetIterator(null);

            while (termsEnum.Next() != null)
            {
                termCount++;
            }
            Assert.IsTrue(termCount > 0);

            // Target ~10 terms to search:
            double chance = 10.0 / termCount;

            termsEnum = terms.GetIterator(termsEnum);
            IDictionary <BytesRef, TopDocs> answers = new Dictionary <BytesRef, TopDocs>();

            while (termsEnum.Next() != null)
            {
                if (Random.NextDouble() <= chance)
                {
                    BytesRef term = BytesRef.DeepCopyOf(termsEnum.Term);
                    answers[term] = s.Search(new TermQuery(new Term("body", term)), 100);
                }
            }

            if (answers.Count > 0)
            {
                CountdownEvent startingGun = new CountdownEvent(1);
                int            numThreads  = TestUtil.NextInt32(Random, 2, 5);
                ThreadJob[]    threads     = new ThreadJob[numThreads];
                for (int threadID = 0; threadID < numThreads; threadID++)
                {
                    ThreadJob thread = new ThreadAnonymousInnerClassHelper(this, s, answers, startingGun);
                    threads[threadID] = thread;
                    thread.Start();
                }
                startingGun.Signal();
                foreach (ThreadJob thread in threads)
                {
                    thread.Join();
                }
            }
            r.Dispose();
            dir.Dispose();
        }
Example #15
        private void AssertConsistentYoungestChild(FacetLabel abPath, int abOrd, int abYoungChildBase1, int abYoungChildBase2, int retry, int numCategories)
        {
            var indexDir = new SlowRAMDirectory(-1, null); // no slowness for initialization
            var tw = new DirectoryTaxonomyWriter(indexDir);
            tw.AddCategory(new FacetLabel("a", "0"));
            tw.AddCategory(abPath);
            tw.Commit();

            var tr = new DirectoryTaxonomyReader(indexDir);
            for (int i = 0; i < numCategories; i++)
            {
                var cp = new FacetLabel("a", "b", Convert.ToString(i));
                tw.AddCategory(cp);
                Assert.AreEqual(TaxonomyReader.INVALID_ORDINAL, tr.GetOrdinal(cp), "Ordinal of " + cp + " must be invalid until Taxonomy Reader was refreshed");
            }
            tw.Dispose();

            var stop = new AtomicBoolean(false);
            Exception[] error = new Exception[] { null };
            int[] retrieval = new int[] { 0 };

            var thread = new ThreadAnonymousInnerClassHelper(this, abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry, tr, stop, error, retrieval);
            thread.Start();

            indexDir.SleepMillis = 1; // some delay for refresh
            var newTaxoReader = TaxonomyReader.OpenIfChanged(tr);
            if (newTaxoReader != null)
            {
                newTaxoReader.Dispose();
            }

            stop.Set(true);
            thread.Join();
            Assert.Null(error[0], "Unexpected exception at retry " + retry + " retrieval " + retrieval[0] + ": \n" + stackTraceStr(error[0]));

            tr.Dispose();
        }
Example #16
        private void RunTest(Random random, Directory dir)
        {
            // Run for ~1 second
            long stopTime = Environment.TickCount + 1000;

            SnapshotDeletionPolicy dp = DeletionPolicy;
            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetIndexDeletionPolicy(dp).SetMaxBufferedDocs(2));

            // Verify we catch misuse:
            try
            {
                dp.Snapshot();
                Assert.Fail("did not hit exception");
            }
            catch (InvalidOperationException ise)
            {
                // expected
            }
            dp = (SnapshotDeletionPolicy)writer.Config.DelPolicy;
            writer.Commit();

            ThreadClass t = new ThreadAnonymousInnerClassHelper(stopTime, writer);

            t.Start();

            // While the above indexing thread is running, take many
            // backups:
            do
            {
                BackupIndex(dir, dp);
                Thread.Sleep(20);
            } while (t.IsAlive);

            t.Join();

            // Add one more document to force writer to commit a
            // final segment, so deletion policy has a chance to
            // delete again:
            Document doc = new Document();
            FieldType customType = new FieldType(TextField.TYPE_STORED);
            customType.StoreTermVectors = true;
            customType.StoreTermVectorPositions = true;
            customType.StoreTermVectorOffsets = true;
            doc.Add(NewField("content", "aaa", customType));
            writer.AddDocument(doc);

            // Make sure we don't have any leftover files in the
            // directory:
            writer.Dispose();
            TestIndexWriter.AssertNoUnreferencedFiles(dir, "some files were not deleted but should have been");
        }
Example #17
 //JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
 //ORIGINAL LINE: @BeforeClass public static void beforeClass() throws Exception
 public static void BeforeClass()
 {
     ThreadClass t = new ThreadAnonymousInnerClassHelper();
     t.Start();
     t.Join();
 }
Example #18
        public virtual void TestNrt()
        {
            Store.Directory dir = NewDirectory();
            Store.Directory taxoDir = NewDirectory();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            // Don't allow tiny maxBufferedDocs; it can make this
            // test too slow:
            iwc.SetMaxBufferedDocs(Math.Max(500, iwc.MaxBufferedDocs));

            // MockRandom/AlcoholicMergePolicy are too slow:
            TieredMergePolicy tmp = new TieredMergePolicy();
            tmp.FloorSegmentMB = .001;
            iwc.SetMergePolicy(tmp);
            IndexWriter w = new IndexWriter(dir, iwc);
            var tw = new DirectoryTaxonomyWriter(taxoDir);
            FacetsConfig config = new FacetsConfig();
            config.SetMultiValued("field", true);
            AtomicBoolean stop = new AtomicBoolean();

            // How many unique facets to index before stopping:
            int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

            var indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);

            var mgr = new SearcherTaxonomyManager(w, true, null, tw);

            var reopener = new ThreadAnonymousInnerClassHelper(this, stop, mgr);

            reopener.Name = "reopener";
            reopener.Start();

            indexer.Name = "indexer";
            indexer.Start();

            try
            {
                while (!stop.Get())
                {
                    SearcherAndTaxonomy pair = mgr.Acquire();
                    try
                    {
                        //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                        FacetsCollector sfc = new FacetsCollector();
                        pair.searcher.Search(new MatchAllDocsQuery(), sfc);
                        Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                        FacetResult result = facets.GetTopChildren(10, "field");
                        if (pair.searcher.IndexReader.NumDocs > 0)
                        {
                            //System.out.println(pair.taxonomyReader.getSize());
                            Assert.True(result.ChildCount > 0);
                            Assert.True(result.LabelValues.Length > 0);
                        }

                        //if (VERBOSE) {
                        //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                        //}
                    }
                    finally
                    {
                        mgr.Release(pair);
                    }
                }
            }
            finally
            {
                indexer.Join();
                reopener.Join();
            }

            if (VERBOSE)
            {
                Console.WriteLine("TEST: now stop");
            }

            IOUtils.Close(mgr, tw, w, taxoDir, dir);
        }
Example #19
        public virtual void Test()
        {
            Directory dir = NewDirectory();
            MockAnalyzer analyzer = new MockAnalyzer(Random());
            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, analyzer, Similarity, TimeZone);
            LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
            int charsToIndex = AtLeast(100000);
            int charsIndexed = 0;
            //System.out.println("bytesToIndex=" + charsToIndex);
            while (charsIndexed < charsToIndex)
            {
                Document doc = docs.NextDoc();
                charsIndexed += doc.Get("body").Length;
                w.AddDocument(doc);
                //System.out.println("  bytes=" + charsIndexed + " add: " + doc);
            }
            IndexReader r = w.Reader;
            //System.out.println("numDocs=" + r.NumDocs);
            w.Dispose();

            IndexSearcher s = NewSearcher(r);
            Terms terms = MultiFields.GetFields(r).Terms("body");
            int termCount = 0;
            TermsEnum termsEnum = terms.Iterator(null);
            while (termsEnum.Next() != null)
            {
                termCount++;
            }
            Assert.IsTrue(termCount > 0);

            // Target ~10 terms to search:
            double chance = 10.0 / termCount;
            termsEnum = terms.Iterator(termsEnum);
            IDictionary<BytesRef, TopDocs> answers = new Dictionary<BytesRef, TopDocs>();
            while (termsEnum.Next() != null)
            {
                if (Random().NextDouble() <= chance)
                {
                    BytesRef term = BytesRef.DeepCopyOf(termsEnum.Term());
                    answers[term] = s.Search(new TermQuery(new Term("body", term)), 100);
                }
            }

            if (answers.Count > 0)
            {
                CountdownEvent startingGun = new CountdownEvent(1);
                int numThreads = TestUtil.NextInt(Random(), 2, 5);
                ThreadClass[] threads = new ThreadClass[numThreads];
                for (int threadID = 0; threadID < numThreads; threadID++)
                {
                    ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, s, answers, startingGun);
                    threads[threadID] = thread;
                    thread.Start();
                }
                startingGun.Signal();
                foreach (ThreadClass thread in threads)
                {
                    thread.Join();
                }
            }
            r.Dispose();
            dir.Dispose();
        }
Example #20
 //JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
 //ORIGINAL LINE: @Test public void testFoo() throws Exception
 public virtual void TestFoo()
 {
     ThreadClass t = new ThreadAnonymousInnerClassHelper(this);
     t.Start();
     t.Join();
 }
Example #21
        public virtual void Test()
        {
            Directory dir = NewDirectory();
            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));

            IList<long?> numbers = new List<long?>();
            IList<BytesRef> binary = new List<BytesRef>();
            IList<BytesRef> sorted = new List<BytesRef>();
            int numDocs = AtLeast(100);
            for (int i = 0; i < numDocs; i++)
            {
                Document d = new Document();
                long number = Random().NextLong();
                d.Add(new NumericDocValuesField("number", number));
                BytesRef bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
                d.Add(new BinaryDocValuesField("bytes", bytes));
                binary.Add(bytes);
                bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
                d.Add(new SortedDocValuesField("sorted", bytes));
                sorted.Add(bytes);
                w.AddDocument(d);
                numbers.Add(number);
            }

            w.ForceMerge(1);
            IndexReader r = w.Reader;
            w.Dispose();

            Assert.AreEqual(1, r.Leaves.Count);
            AtomicReader ar = (AtomicReader)r.Leaves[0].Reader;

            int numThreads = TestUtil.NextInt(Random(), 2, 5);
            IList<ThreadClass> threads = new List<ThreadClass>();
            CountDownLatch startingGun = new CountDownLatch(1);
            for (int t = 0; t < numThreads; t++)
            {
                Random threadRandom = new Random(Random().Next());
                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
                thread.Start();
                threads.Add(thread);
            }

            startingGun.countDown();

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }

            r.Dispose();
            dir.Dispose();
        }