/// <summary>
/// Creates search and taxonomy readers over the corresponding directories.
/// <para>
/// <b>NOTE:</b> you should only use this constructor if you commit and call
/// <see cref="Search.ReferenceManager{G}.MaybeRefresh()"/> (on the <see cref="Index.ReaderManager"/>)
/// in the same thread. Otherwise it could lead to an unsync'd
/// <see cref="IndexSearcher"/> and <see cref="TaxonomyReader"/> pair.
/// </para>
/// </summary>
/// <param name="indexDir">Directory containing the search index.</param>
/// <param name="taxoDir">Directory containing the taxonomy index.</param>
/// <param name="searcherFactory">Optional factory for custom searchers; a default <see cref="SearcherFactory"/> is used when <c>null</c>.</param>
public SearcherTaxonomyManager(Store.Directory indexDir, Store.Directory taxoDir, SearcherFactory searcherFactory)
{
    this.searcherFactory = searcherFactory ?? new SearcherFactory();

    var taxoReader = new DirectoryTaxonomyReader(taxoDir);
    try
    {
        Current = new SearcherAndTaxonomy(
            SearcherManager.GetSearcher(this.searcherFactory, DirectoryReader.Open(indexDir)),
            taxoReader);
    }
    catch
    {
        // BUGFIX: if opening the search index (or creating the searcher) fails,
        // the already-opened taxonomy reader must not leak.
        taxoReader.Dispose();
        throw;
    }

    this.taxoWriter = null;
    taxoEpoch = -1;
}
/// <summary>
/// Verifies that reopening the <see cref="SearcherManager"/> after an index
/// change hands out a new <see cref="IndexReader"/>, and that releasing an
/// already-released searcher fails because its reader has been closed.
/// </summary>
public void Check_IndexReader_Get_Closed_When_Creating_New()
{
    // Arrange: an index writer and a searcher manager over the same directory.
    var dir = new RAMDirectory();
    var writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    var manager = new SearcherManager(dir);
    try
    {
        var searcher1 = manager.GetSearcher();

        // Change the index
        writer.AddDocument(new Document());
        writer.Commit();

        // Reopen the IndexReader
        manager.MaybeReopen();
        var searcher2 = manager.GetSearcher();

        Assert.AreNotEqual(searcher1.IndexReader, searcher2.IndexReader, "A new IndexReader was not created.");

        manager.ReleaseSearcher(searcher2);
        manager.ReleaseSearcher(searcher1);

        // The second release of searcher1 must fail: its underlying reader is closed.
        Assert.Throws<AlreadyClosedException>(() => manager.ReleaseSearcher(searcher1));
    }
    finally
    {
        // BUGFIX: the writer and directory were leaked by the original test.
        writer.Dispose();
        dir.Dispose();
    }
}
/// <summary>
/// Creates near-real-time searcher and taxonomy reader from the corresponding writers.
/// </summary>
/// <param name="writer">Index writer to open an NRT reader from.</param>
/// <param name="applyAllDeletes">Whether deletes should be applied when the NRT reader is opened.</param>
/// <param name="searcherFactory">Optional factory for custom searchers; a default <see cref="SearcherFactory"/> is used when <c>null</c>.</param>
/// <param name="taxoWriter">Taxonomy writer to open the NRT taxonomy reader from.</param>
public SearcherTaxonomyManager(IndexWriter writer, bool applyAllDeletes, SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter)
{
    // Consistent with the directory-based constructor: default the factory via ??.
    this.searcherFactory = searcherFactory ?? new SearcherFactory();
    this.taxoWriter = taxoWriter;

    var taxoReader = new DirectoryTaxonomyReader(taxoWriter);
    try
    {
        Current = new SearcherAndTaxonomy(
            SearcherManager.GetSearcher(this.searcherFactory, DirectoryReader.Open(writer, applyAllDeletes)),
            taxoReader);
    }
    catch
    {
        // BUGFIX: if opening the NRT reader (or creating the searcher) fails,
        // the already-opened taxonomy reader must not leak.
        taxoReader.Dispose();
        throw;
    }

    this.taxoEpoch = taxoWriter.TaxonomyEpoch;
}
/// <summary>
/// Parses <paramref name="searchString"/> and returns the <c>FieldName</c>
/// values of the top 100 relevance-sorted matches.
/// </summary>
/// <param name="manager">Manager from which a searcher is acquired and released.</param>
/// <param name="searchString">Query text understood by <see cref="QueryParser"/>.</param>
/// <returns>Stored field values of the matching documents, best match first.</returns>
private string[] Search(SearcherManager manager, string searchString)
{
    var parser = new QueryParser(LuceneVersion, FieldName, _analyzer);
    Query query = parser.Parse(searchString);

    var searcher = manager.GetSearcher();
    try
    {
        TopDocs hits = searcher.Search(query, null, 100, Sort.RELEVANCE);
        var results = new List<string>(hits.ScoreDocs.Length);
        foreach (ScoreDoc match in hits.ScoreDocs)
        {
            results.Add(searcher.Doc(match.Doc).Get(FieldName));
        }
        return results.ToArray();
    }
    finally
    {
        // BUGFIX: the searcher must be released even when Search/Doc throws;
        // the original leaked the reference on any exception.
        manager.ReleaseSearcher(searcher);
    }
}
/// <summary>
/// Demonstrates a spatial search: indexes sample deals, scores them by
/// distance from a fixed point, filters to a 2000 km radius, and prints the
/// best match per supplier group.
/// </summary>
public static void SearchSample()
{
    Directory dir = new RAMDirectory();
    Analyzer analyzer = new StandardAnalyzer(Version);
    var indexWriter = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);

    SpatialContext ctx = SpatialContext.GEO;
    var strategy = new PointVectorStrategy(ctx, SpartialFieldName);
    //var precision = 8; // Precision 8 means down to 19 meter - higher precision consumes more memory
    //SpatialPrefixTree grid = new GeohashPrefixTree(ctx, precision);
    //var strategy = new RecursivePrefixTreeStrategy(grid, spartialFieldName);

    var docs = CreateSearchDocuments(GetDeals(), strategy);
    foreach (var doc in docs)
    {
        indexWriter.AddDocument(doc);
    }
    indexWriter.Commit();
    indexWriter.Dispose();

    // "Current" position
    Point littleMermaid = ctx.MakePoint(12.599239, 55.692848);

    //var parser = new QueryParser(Version, "title", analyzer);
    //Query q = parser.Parse("deal");
    Query q = new MatchAllDocsQuery(); // NOTE: MatchAllDocsQuery always returns score as 1.0

    // Add distance from current point to the scoring
    q = new DistanceCustomScoreQuery(q, strategy, littleMermaid);
    //q = new RecursivePrefixTreeStrategyDistanceCustomScoreQuery(q, strategy, littleMermaid, spartialFieldName);

    // Remove everything more than 2000 km away
    var filter = strategy.MakeFilter(new SpatialArgs(SpatialOperation.Intersects,
        ctx.MakeCircle(littleMermaid, DistanceUtils.Dist2Degrees(2000, DistanceUtils.EARTH_MEAN_RADIUS_KM))));

    // Ensures the most recent searcher is used without destroying the Lucene IndexReader cache (via NRT)
    var searcherManager = new SearcherManager(dir);

    var collector = new GroupTopDocsCollector(5, SupplierFieldName);
    var searcher = searcherManager.GetSearcher();
    try
    {
        searcher.Search(q, filter, collector);

        // BUGFIX: documents must be fetched while the searcher is still
        // acquired; the original called searcher.Doc(...) AFTER the finally
        // block had already released the searcher.
        var hits = collector.GroupTopDocs();
        Console.WriteLine("Found {0} document(s) that matched query '{1}':", hits.TotalHits, q);
        foreach (var match in hits.GroupScoreDocs)
        {
            Document doc = searcher.Doc(match.Doc);
            Console.WriteLine("Best match '{0}' in group '{1}' with count {2} (MaxDoc: Score {3} Location '{4}')",
                doc.Get(TitleFieldName), match.GroupFieldValue, match.GroupCount, match.Score, doc.Get(LocationNameFieldName));
        }
    }
    finally
    {
        searcherManager.ReleaseSearcher(searcher);
    }
}