protected override SearcherAndTaxonomy RefreshIfNeeded(SearcherAndTaxonomy @ref)
{
    // Must re-open searcher first, otherwise we may get a
    // new reader that references ords not yet known to the
    // taxonomy reader:
    IndexReader r = @ref.searcher.IndexReader;
    IndexReader newReader = DirectoryReader.OpenIfChanged((DirectoryReader)r);
    if (newReader == null)
    {
        return null;
    }
    else
    {
        var tr = TaxonomyReader.OpenIfChanged(@ref.taxonomyReader);
        if (tr == null)
        {
            @ref.taxonomyReader.IncRef();
            tr = @ref.taxonomyReader;
        }
        else if (taxoWriter != null && taxoWriter.TaxonomyEpoch != taxoEpoch)
        {
            IOUtils.Close(newReader, tr);
            throw new ThreadStateException("DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
        }

        return new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, newReader), tr);
    }
}
/// <summary>
/// Creates searcher and taxonomy readers over the corresponding directories.
///
/// <para>
/// <b>NOTE:</b> you should only use this constructor if you commit and call
/// <see cref="MaybeRefresh()"/> in the same thread. Otherwise it could lead to an
/// unsynchronized <see cref="IndexSearcher"/> and <see cref="TaxonomyReader"/> pair.
/// </para>
/// </summary>
public SearcherTaxonomyManager(Store.Directory indexDir, Store.Directory taxoDir, SearcherFactory searcherFactory)
{
    if (searcherFactory == null)
    {
        searcherFactory = new SearcherFactory();
    }
    this.searcherFactory = searcherFactory;
    var taxoReader = new DirectoryTaxonomyReader(taxoDir);
    Current = new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, DirectoryReader.Open(indexDir)), taxoReader);
    this.taxoWriter = null;
    taxoEpoch = -1;
}
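// A minimal usage sketch of the directory-based constructor (illustrative only,
// not part of the class). It assumes an IndexWriter (indexWriter) and a
// DirectoryTaxonomyWriter (taxoWriter) over indexDir/taxoDir exist elsewhere;
// the commits and MaybeRefresh() happen on the same thread, as the NOTE above requires.
var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);

// ... after indexing new documents and facet categories from this same thread:
indexWriter.Commit();
taxoWriter.Commit();
mgr.MaybeRefresh();

SearcherAndTaxonomy pair = mgr.Acquire();
try
{
    FacetsCollector fc = new FacetsCollector();
    pair.searcher.Search(new MatchAllDocsQuery(), fc);
    // ... compute facet counts against pair.taxonomyReader and fc ...
}
finally
{
    mgr.Release(pair); // always release; the manager ref-counts both readers
}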
/// <summary>
/// Creates near-real-time searcher and taxonomy reader
/// from the corresponding writers.
/// </summary>
public SearcherTaxonomyManager(IndexWriter writer, bool applyAllDeletes, SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter)
{
    if (searcherFactory == null)
    {
        searcherFactory = new SearcherFactory();
    }
    this.searcherFactory = searcherFactory;
    this.taxoWriter = taxoWriter;
    var taxoReader = new DirectoryTaxonomyReader(taxoWriter);
    Current = new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, DirectoryReader.Open(writer, applyAllDeletes)), taxoReader);
    this.taxoEpoch = taxoWriter.TaxonomyEpoch;
}
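// A minimal near-real-time usage sketch (illustrative only, not part of the
// class). The manager re-opens directly from the IndexWriter and
// DirectoryTaxonomyWriter passed in, so no commit is required before a refresh.
// The writer and taxoWriter locals are assumed to be created elsewhere.
var nrtMgr = new SearcherTaxonomyManager(writer, true, null, taxoWriter);

// ... add documents via writer and facet categories via taxoWriter ...

nrtMgr.MaybeRefresh(); // picks up uncommitted changes near-real-time

SearcherAndTaxonomy nrtPair = nrtMgr.Acquire();
try
{
    nrtPair.searcher.Search(new MatchAllDocsQuery(), 10);
    // ... faceting against nrtPair.taxonomyReader ...
}
finally
{
    nrtMgr.Release(nrtPair);
}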
protected override void DecRef(SearcherAndTaxonomy @ref)
{
    @ref.searcher.IndexReader.DecRef();

    // This decRef can fail, and then in theory we should
    // tryIncRef the searcher to put back the ref count
    // ... but 1) the below decRef should only fail because
    // it decRef'd to 0 and closed and hit some IOException
    // during close, in which case 2) very likely the
    // searcher was also just closed by the above decRef and
    // a tryIncRef would fail:
    @ref.taxonomyReader.DecRef();
}
protected override bool TryIncRef(SearcherAndTaxonomy @ref)
{
    if (@ref.searcher.IndexReader.TryIncRef())
    {
        if (@ref.taxonomyReader.TryIncRef())
        {
            return true;
        }
        else
        {
            @ref.searcher.IndexReader.DecRef();
        }
    }
    return false;
}
public virtual void TestReplaceTaxonomyDirectory()
{
    Store.Directory indexDir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriter w = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    var tw = new DirectoryTaxonomyWriter(taxoDir);
    w.Commit();
    tw.Commit();

    Store.Directory taxoDir2 = NewDirectory();
    var tw2 = new DirectoryTaxonomyWriter(taxoDir2);
    tw2.AddCategory(new FacetLabel("a", "b"));
    tw2.Dispose();

    var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);
    SearcherAndTaxonomy pair = mgr.Acquire();
    try
    {
        Assert.AreEqual(1, pair.taxonomyReader.Size);
    }
    finally
    {
        mgr.Release(pair);
    }

    w.AddDocument(new Document());
    tw.ReplaceTaxonomy(taxoDir2);
    taxoDir2.Dispose();
    w.Commit();
    tw.Commit();

    mgr.MaybeRefresh();
    pair = mgr.Acquire();
    try
    {
        Assert.AreEqual(3, pair.taxonomyReader.Size);
    }
    finally
    {
        mgr.Release(pair);
    }

    IOUtils.Close(mgr, tw, w, taxoDir, indexDir);
}
protected override int GetRefCount(SearcherAndTaxonomy reference)
{
    return reference.searcher.IndexReader.RefCount;
}
public virtual void TestDirectory()
{
    Store.Directory indexDir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriter w = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    var tw = new DirectoryTaxonomyWriter(taxoDir);
    // first empty commit
    w.Commit();
    tw.Commit();
    var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);

    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("field", true);
    AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

    var indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
    indexer.Start();

    try
    {
        while (!stop.Get())
        {
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.searcher.Search(new MatchAllDocsQuery(), sfc);
                Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                FacetResult result = facets.GetTopChildren(10, "field");
                if (pair.searcher.IndexReader.NumDocs > 0)
                {
                    //System.out.println(pair.taxonomyReader.getSize());
                    Assert.True(result.ChildCount > 0);
                    Assert.True(result.LabelValues.Length > 0);
                }

                //if (VERBOSE) {
                //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                //}
            }
            finally
            {
                mgr.Release(pair);
            }
        }
    }
    finally
    {
        indexer.Join();
    }

    if (VERBOSE)
    {
        Console.WriteLine("TEST: now stop");
    }

    IOUtils.Close(mgr, tw, w, taxoDir, indexDir);
}
public virtual void TestNrt()
{
    Store.Directory dir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    // Don't allow tiny maxBufferedDocs; it can make this
    // test too slow:
    iwc.SetMaxBufferedDocs(Math.Max(500, iwc.MaxBufferedDocs));

    // MockRandom/AlcoholicMergePolicy are too slow:
    TieredMergePolicy tmp = new TieredMergePolicy();
    tmp.FloorSegmentMB = .001;
    iwc.SetMergePolicy(tmp);

    IndexWriter w = new IndexWriter(dir, iwc);
    var tw = new DirectoryTaxonomyWriter(taxoDir);

    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("field", true);
    AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

    var indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);

    var mgr = new SearcherTaxonomyManager(w, true, null, tw);

    var reopener = new ThreadAnonymousInnerClassHelper(this, stop, mgr);
    reopener.Name = "reopener";
    reopener.Start();

    indexer.Name = "indexer";
    indexer.Start();

    try
    {
        while (!stop.Get())
        {
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.searcher.Search(new MatchAllDocsQuery(), sfc);
                Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                FacetResult result = facets.GetTopChildren(10, "field");
                if (pair.searcher.IndexReader.NumDocs > 0)
                {
                    //System.out.println(pair.taxonomyReader.getSize());
                    Assert.True(result.ChildCount > 0);
                    Assert.True(result.LabelValues.Length > 0);
                }

                //if (VERBOSE) {
                //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                //}
            }
            finally
            {
                mgr.Release(pair);
            }
        }
    }
    finally
    {
        indexer.Join();
        reopener.Join();
    }

    if (VERBOSE)
    {
        Console.WriteLine("TEST: now stop");
    }

    IOUtils.Close(mgr, tw, w, taxoDir, dir);
}
public virtual void Test_Directory() // LUCENENET specific - name collides with property of LuceneTestCase
{
    Store.Directory indexDir = NewDirectory();
    Store.Directory taxoDir = NewDirectory();
    IndexWriter w = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
    var tw = new DirectoryTaxonomyWriter(taxoDir);
    // first empty commit
    w.Commit();
    tw.Commit();
    var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);

    FacetsConfig config = new FacetsConfig();
    config.SetMultiValued("field", true);
    AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    //int ordLimit = TestNightly ? 100000 : 6000;

    // LUCENENET specific: 100000 facets takes about 2-3 hours. To keep it under
    // the 1 hour free limit of Azure DevOps, this was reduced to 30000.
    int ordLimit = TestNightly ? 30000 : 6000;

    var indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
    indexer.Start();

    try
    {
        while (!stop)
        {
            SearcherAndTaxonomy pair = mgr.Acquire();
            try
            {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.Searcher.Search(new MatchAllDocsQuery(), sfc);
                Facets facets = GetTaxonomyFacetCounts(pair.TaxonomyReader, config, sfc);
                FacetResult result = facets.GetTopChildren(10, "field");
                if (pair.Searcher.IndexReader.NumDocs > 0)
                {
                    //System.out.println(pair.taxonomyReader.getSize());
                    Assert.IsTrue(result.ChildCount > 0);
                    Assert.IsTrue(result.LabelValues.Length > 0);
                }

                //if (VERBOSE) {
                //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                //}
            }
            finally
            {
                mgr.Release(pair);
            }
        }
    }
    finally
    {
        indexer.Join();
    }

    if (Verbose)
    {
        Console.WriteLine("TEST: now stop");
    }

    IOUtils.Dispose(mgr, tw, w, taxoDir, indexDir);
}