public virtual void TestDeleteDocuments()
{
    Directory directory = CreateIndex();
    IndexWriter writer = GetWriter(directory);
    ReferenceManager<IndexSearcher> mgr = new SearcherManager(writer, true, new SearcherFactory());
    IndexSearcher searcher = mgr.Acquire();
    TopDocs topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
    Assert.AreEqual(1, topDocs.TotalHits);
    TrackingIndexWriter mgrWriter = new TrackingIndexWriter(writer);
    long result = mgrWriter.DeleteDocuments(new TermQuery(new Term("foo", "0")));
    Assert.AreEqual(1, result);
    // writer.Commit();
    Assert.IsTrue(writer.HasDeletions());
    mgr.MaybeRefresh();
    searcher = mgr.Acquire();
    topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
    Assert.AreEqual(0, topDocs.TotalHits);
}
public virtual void TestTryDeleteDocument()
{
    Directory directory = CreateIndex();
    IndexWriter writer = GetWriter(directory);
    ReferenceManager<IndexSearcher> mgr = new SearcherManager(writer, true, new SearcherFactory());
    TrackingIndexWriter mgrWriter = new TrackingIndexWriter(writer);
    IndexSearcher searcher = mgr.Acquire();
    TopDocs topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
    Assert.AreEqual(1, topDocs.TotalHits);
    long result;
    if (Random.NextBoolean())
    {
        IndexReader r = DirectoryReader.Open(writer, true);
        result = mgrWriter.TryDeleteDocument(r, 0);
        r.Dispose();
    }
    else
    {
        result = mgrWriter.TryDeleteDocument(searcher.IndexReader, 0);
    }
    // The tryDeleteDocument should have succeeded:
    Assert.IsTrue(result != -1);
    Assert.IsTrue(writer.HasDeletions());
    if (Random.NextBoolean())
    {
        writer.Commit();
    }
    Assert.IsTrue(writer.HasDeletions());
    mgr.MaybeRefresh();
    searcher = mgr.Acquire();
    topDocs = searcher.Search(new TermQuery(new Term("foo", "0")), 100);
    Assert.AreEqual(0, topDocs.TotalHits);
}
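// Searches the screening-response index ("Reason" and "Commentary" fields) for the given term,
// restricted to one screening entity; returns an empty list if the term cannot be parsed.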
public IList<LuceneSearchResult> Search(string term, string screeningEntityName, int numResults)
{
    using (SearcherManager manager = new SearcherManager(ScreeningResponseIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        QueryParser parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30,
            new string[] { "Reason", "Commentary" }, new SimpleAnalyzer());
        parser.DefaultOperator = QueryParser.Operator.AND;
        parser.AllowLeadingWildcard = true;
        try
        {
            Query query = parser.Parse(term);
            BooleanQuery bq = new BooleanQuery();
            bq.Add(query, Occur.MUST);
            bq.Add(new TermQuery(new Term("ScreeningEntityName", screeningEntityName)), Occur.MUST);
            log.Debug("Search query: " + bq.ToString());
            this.topDocs = this.searcher.Search(bq, numResults);
            return TransformTopDocs();
        }
        catch (ParseException e)
        {
            log.Error("Encountered problem parsing the search term: " + term, e);
            return new List<LuceneSearchResult>();
        }
    }
}
// See PersonIndexer.cs for details on how fields were originally indexed.
public IList<LuceneSearchResult> Search(string term, int numResults, bool includeRestrictedProfiles)
{
    using (SearcherManager manager = new SearcherManager(PersonIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        try
        {
            Query query = CreateQueryFromTerm(term);
            if (query == null)
            {
                return new List<LuceneSearchResult>();
            }
            // Filter on Person.IsRestrictedProfile since restricted profiles are not viewable in screening.
            if (!includeRestrictedProfiles)
            {
                BooleanQuery booleanQuery = new BooleanQuery();
                booleanQuery.Add(query, Occur.MUST);
                booleanQuery.Add(new TermQuery(new Term("IsRestrictedProfile", "0")), Occur.MUST);
                query = booleanQuery;
            }
            this.topDocs = this.searcher.Search(query, numResults);
            return TransformTopDocs();
        }
        catch (ParseException e)
        {
            log.Error("Encountered problem parsing the search term: " + term, e);
            return new List<LuceneSearchResult>();
        }
    }
}
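// Returns the highest source Id currently in the index (0 if none found),
// using a descending sort on the numeric "Id" field and reading the top hit.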
public int GetMaxSourceID()
{
    using (SearcherManager manager = new SearcherManager(SourceIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        Query query = NumericRangeQuery.NewIntRange("Id", 1, int.MaxValue, true, true);
        this.topDocs = this.searcher.Search(query, null, 1, new Sort(new SortField("Id", SortField.INT, true)));
        if (this.topDocs != null && this.topDocs.ScoreDocs != null && this.topDocs.ScoreDocs.Length > 0)
        {
            ScoreDoc scoreDoc = this.topDocs.ScoreDocs[0];
            LuceneSearchResult result = new LuceneSearchResult(this.searcher.Doc(scoreDoc.Doc), scoreDoc.Score, this.topDocs.TotalHits);
            int id = 0;
            if (result.FieldValues != null &&
                result.FieldValues["Id"] != null &&
                result.FieldValues["Id"].Count > 0 &&
                int.TryParse(result.FieldValues["Id"][0], out id))
            {
                return id;
            }
        }
    }
    return 0;
}
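// Runs the parsed query and returns a plain-text report of the top 5 hits
// (id, last-updated metadata, and content for each document).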
public string Search(string input)
{
    Query query = parser.Parse(input);
    IndexSearcher searcher = searcherManager.Acquire();
    TopDocs results = searcher.Search(query, 5);
    StringBuilder resultBuilder = new StringBuilder();
    resultBuilder.AppendLine($"Search for: {query} in index '{name}' resulted in a total of {results.TotalHits} hits.");
    resultBuilder.AppendLine("============================== RESULTS ==============================");
    foreach (string result in results.ScoreDocs
        .Select(doc => searcher.Doc(doc.Doc))
        .Select(doc =>
        {
            StringBuilder builder = new StringBuilder();
            builder.AppendLine($"id: {doc.GetField("id").GetStringValue()}");
            builder.AppendLine($" - last updated: {doc.GetField("updated").GetStringValue()} by {doc.GetField("by").GetStringValue()}");
            builder.AppendLine($" - {doc.GetField("content").GetStringValue()}");
            return builder.ToString();
        }))
    {
        resultBuilder.AppendLine(result);
        resultBuilder.AppendLine();
    }
    searcherManager.Release(searcher);
    return resultBuilder.ToString();
}
public IList<LuceneSearchResult> AllSourcesWithCaseNumbers(string term, int numResults, bool includeRestrictedSources, string sortField, bool descending)
{
    using (SearcherManager manager = new SearcherManager(SourceIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        BooleanQuery booleanQuery = new BooleanQuery();
        if (string.IsNullOrEmpty(term))
        {
            booleanQuery.Add(new MatchAllDocsQuery(), Occur.MUST);
        }
        else
        {
            booleanQuery.Add(new WildcardQuery(new Term("JhroCaseNumber", "*" + term + "*")), Occur.MUST);
        }
        // value '0' set by SourceIndexer
        booleanQuery.Add(new TermQuery(new Term("JhroCaseNumber", "0")), Occur.MUST_NOT);
        if (!includeRestrictedSources)
        {
            booleanQuery.Add(new TermQuery(new Term("IsRestricted", "0")), Occur.MUST);
        }
        log.Debug("Search query: " + booleanQuery.ToString());
        this.PerformSearch(booleanQuery, numResults, sortField, descending);
        return this.TransformTopDocs();
    }
}
public IEnumerable<MenuItem> Search(string queryString, ResultFilter filter = null)
{
    if (queryString != null)
    {
        queryString = queryString.Trim();
    }
    if (string.IsNullOrEmpty(queryString))
    {
        yield break;
    }
    queryString = Escape(queryString);
    queryString += "~";
    // FuzzyQuery query = new FuzzyQuery(new Term("joe", queryString), 5);
    // parser.FuzzyMinSim = 10f;
    Query query = parser.Parse(queryString);
    manager.MaybeRefreshBlocking();
    IndexSearcher searcher = manager.Acquire();
    try
    {
        TopDocs documents = searcher.Search(query, (filter ?? ResultFilter.Default).Limit > 0 ? filter.Limit : 1);
        foreach (ScoreDoc scoreDocument in documents.ScoreDocs)
        {
            Document document = searcher.Doc(scoreDocument.Doc);
            yield return items[document.GetField(DESCRIPTION).GetStringValue()];
        }
    }
    finally
    {
        manager.Release(searcher);
        searcher = null;
    }
}
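// Returns every matching entry with its score, sorted using IdSort;
// throws if a stored document is missing its Id or Time value.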
public static IReadOnlyList<EntryWithScore> SearchFull(SearcherManager manager, QueryParser queryParser, string searchTerm)
{
    IndexSearcher searcher = null;
    try
    {
        searcher = manager.Acquire();
        var query = queryParser.Parse(searchTerm);
        var searchResults = searcher.Search(query, null, int.MaxValue, IdSort, true, true);
        var results = new EntryWithScore[searchResults.TotalHits];
        for (var i = 0; i < searchResults.ScoreDocs.Length; i++)
        {
            var doc = searchResults.ScoreDocs[i];
            var item = searcher.Doc(doc.Doc);
            var id = item.GetField(nameof(Entry.Id)).GetInt32Value();
            if (!id.HasValue)
            {
                throw new InvalidOperationException($"Id did not have a value for document: {item}.");
            }
            var time = item.GetField(nameof(Entry.Time)).GetInt64Value();
            if (!time.HasValue)
            {
                throw new InvalidOperationException($"Time did not have a value for document: {item}.");
            }
            var date = DateTimeOffset.FromUnixTimeSeconds(time.Value).UtcDateTime;
            var entry = new EntryWithScore
            {
                Id = id.Value,
                Date = date,
                Score = doc.Score,
                Time = time.Value,
                Url = item.Get(nameof(Entry.Url)),
                Title = item.Get(nameof(Entry.Title))
            };
            results[i] = entry;
        }
        return results;
    }
    finally
    {
        manager.Release(searcher);
    }
}
public IList<LuceneSearchResult> Search(string term, string prefix, bool usePrefixQuery, DateTime? start, DateTime? end, int numResults, bool canViewAndSearchAll, bool includeRestrictedSources, string uploadedByUserId, IList<string> owners, string sortField, bool descending)
{
    using (SearcherManager manager = new SearcherManager(SourceIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        Query query = this.BuildQuery(term, prefix, usePrefixQuery, start, end, canViewAndSearchAll, includeRestrictedSources, uploadedByUserId, owners);
        this.PerformSearch(query, numResults, sortField, descending);
        return TransformTopDocs(query);
    }
}
// See UnitIndexer.cs for details on how fields were originally indexed.
public IList<LuceneSearchResult> Search(string term, int numResults)
{
    using (SearcherManager manager = new SearcherManager(UnitIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        QueryParser parser;
        if (!string.IsNullOrEmpty(term) && term.Trim().StartsWith("Id"))
        {
            // Single and ranged numeric value search on Id field
            parser = new NumericQueryParser(Lucene.Net.Util.Version.LUCENE_30, "Id", new PersonAnalyzer());
        }
        else
        {
            // General search across text fields
            parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30,
                new string[] { "Name", "ParentNameChange", "ChildNameChange", "BackgroundInformation", "Organization" },
                new PersonAnalyzer());
            // We maintain OR as default for maximum results
            parser.DefaultOperator = QueryParser.Operator.OR;
            if (!string.IsNullOrEmpty(term))
            {
                if (!term.Contains(':'))
                {
                    // Edit user's search string and add wildcards.
                    term = string.Join(" ", term.Split(new string[] { " " }, System.StringSplitOptions.RemoveEmptyEntries)
                        .Select(x => "*" + x + "*")
                        .ToArray());
                }
            }
        }
        parser.AllowLeadingWildcard = true;
        try
        {
            Query query = parser.Parse(term);
            log.Debug("Search query: " + query.ToString());
            this.topDocs = this.searcher.Search(query, numResults);
            return TransformTopDocs();
        }
        catch (ParseException e)
        {
            log.Error("Encountered problem parsing the search term: " + term, e);
            return new List<LuceneSearchResult>();
        }
    }
}
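// Refreshes the searcher if needed, then returns a LocatedEntry (Id and UTC date)
// for every hit, sorted using IdSort.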
public static IReadOnlyList<LocatedEntry> Search(SearcherManager manager, QueryParser queryParser, string searchTerm)
{
    IndexSearcher searcher = null;
    try
    {
        manager.MaybeRefresh();
        searcher = manager.Acquire();
        var query = queryParser.Parse(searchTerm);
        var searchResults = searcher.Search(query, int.MaxValue, IdSort);
        var results = new LocatedEntry[searchResults.TotalHits];
        for (var i = 0; i < searchResults.ScoreDocs.Length; i++)
        {
            var doc = searchResults.ScoreDocs[i];
            var item = searcher.Doc(doc.Doc);
            var id = item.GetField(nameof(Entry.Id)).GetInt32Value();
            if (!id.HasValue)
            {
                throw new InvalidOperationException($"Id did not have a value for document: {item}.");
            }
            var time = item.GetField(nameof(Entry.Time)).GetInt64Value();
            if (!time.HasValue)
            {
                throw new InvalidOperationException($"Time did not have a value for document: {item}.");
            }
            var date = DateTimeOffset.FromUnixTimeSeconds(time.Value).UtcDateTime;
            results[i] = new LocatedEntry(id.Value, date);
        }
        return results;
    }
    finally
    {
        manager.Release(searcher);
    }
}
public SearchResults Search(string queryString)
{
    int resultsPerPage = 10;
    var analyzer = SetupAnalyzer();
    var queryParser = SetupQueryParser(analyzer);
    IEnumerable<FieldDefinition> fields = new List<FieldDefinition>
    {
        new FieldDefinition { Name = "title", isDefault = true },
        new FieldDefinition { Name = "description", isDefault = false }
    };
    // Query query = BuildQuery(queryString, queryParser);
    // BuildQuery(queryString, fields);
    Query query;
    if (queryString.EndsWith('~'))
    {
        query = BuildQuery(queryString, queryParser);
    }
    else
    {
        query = BuildQuery(queryString, fields);
    }
    using (var writer = new IndexWriter(_directory, new IndexWriterConfig(MATCH_LUCENE_VERSION, analyzer)))
    {
        var searchManager = new SearcherManager(writer, true, null);
        searchManager.MaybeRefreshBlocking();
        IndexSearcher searcher = searchManager.Acquire();
        try
        {
            TopDocs topDocs = searcher.Search(query, resultsPerPage);
            return CompileResults(searcher, topDocs);
        }
        finally
        {
            searchManager?.Release(searcher);
            searchManager?.Dispose();
            searcher = null;
            analyzer?.Dispose();
            ReleaseWriteLock();
        }
    }
}
public List<SearchResult> Search(string queryString, out int totalHits)
{
    var l = new List<SearchResult>();

    // Parse the query - assuming it's not a single term but an actual query string
    // Note the QueryParser used is using the same analyzer used for indexing
    var query = queryParser.Parse(queryString);
    var _totalHits = 0;

    // Execute the search with a fresh indexSearcher
    searcherManager.MaybeRefreshBlocking();
    var searcher = searcherManager.Acquire();
    try
    {
        var topDocs = searcher.Search(query, 10);
        _totalHits = topDocs.TotalHits;
        foreach (var result in topDocs.ScoreDocs)
        {
            var doc = searcher.Doc(result.Doc);
            l.Add(new SearchResult
            {
                Name = doc.GetField("name")?.GetStringValue(),
                Description = doc.GetField("description")?.GetStringValue(),
                Url = doc.GetField("url")?.GetStringValue(),
                // Results are automatically sorted by relevance
                Score = result.Score,
            });
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
    }
    finally
    {
        searcherManager.Release(searcher);
        searcher = null; // Don't use searcher after this point!
    }
    totalHits = _totalHits;
    return l;
}
public IList<LuceneSearchResult> GetSourcesLikeThis(int sourceId, int numResults)
{
    IList<LuceneSearchResult> results = new List<LuceneSearchResult>();
    using (SearcherManager manager = new SearcherManager(SourceIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        Query query = NumericRangeQuery.NewIntRange("Id", sourceId, sourceId, true, true);
        this.topDocs = this.searcher.Search(query, null, 1);
        if (this.topDocs != null && this.topDocs.ScoreDocs != null && this.topDocs.ScoreDocs.Length > 0)
        {
            // run second search using MoreLikeThis query
            using (IndexReader reader = IndexReader.Open(SourceIndexWriterSingleton.Directory, true))
            {
                int maxDoc = reader.MaxDoc;
                MoreLikeThis mlt = new MoreLikeThis(reader);
                mlt.SetFieldNames(new string[] { "FileData" });
                mlt.MinTermFreq = 1;
                mlt.MinDocFreq = 1;
                BooleanQuery bq = new BooleanQuery();
                bq.Add(mlt.Like(this.topDocs.ScoreDocs[0].Doc), Occur.MUST);
                bq.Add(query, Occur.MUST_NOT);
                log.Info("More like this query: " + bq.ToString());
                TopDocs similarDocs = this.searcher.Search(bq, numResults);
                if (similarDocs.TotalHits > 0)
                {
                    foreach (ScoreDoc scoreDoc in similarDocs.ScoreDocs)
                    {
                        results.Add(new LuceneSearchResult(this.searcher.Doc(scoreDoc.Doc), scoreDoc.Score, similarDocs.TotalHits));
                    }
                }
            }
        }
    }
    return results;
}
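// Acquires a fresh searcher after a blocking refresh, runs the query for one page of results,
// and always releases the searcher in the finally block.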
public SearchResults Search(string queryString)
{
    int resultsPerPage = 10;
    Query query = BuildQuery(queryString);
    searchManager.MaybeRefreshBlocking();
    IndexSearcher searcher = searchManager.Acquire();
    try
    {
        TopDocs topDocs = searcher.Search(query, resultsPerPage);
        return CompileResults(searcher, topDocs);
    }
    finally
    {
        searchManager.Release(searcher);
        searcher = null;
    }
}
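// Builds a BooleanQuery of required term matches from the supplied field/value pairs,
// then loads the content behind each hit's stored "Url" field.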
public async Task<IEnumerable<SearchResult>> Search(IDictionary<string, string> matches)
{
    var results = new List<SearchResult>();
    using (Directory directory = FSDirectory.Open(this._directory))
    {
        SearcherManager searchManager = new SearcherManager(directory, null);
        searchManager.MaybeRefreshBlocking();
        IndexSearcher indexSearcher = searchManager.Acquire();
        var booleanQuery = new BooleanQuery();
        foreach (var match in matches)
        {
            booleanQuery.Add(new TermQuery(new Term(match.Key, match.Value)), Occur.MUST);
        }
        var hits = indexSearcher.Search(booleanQuery, int.MaxValue);
        var scored = hits.ScoreDocs;
        foreach (var item in scored)
        {
            Document doc = indexSearcher.Doc(item.Doc);
            string url = doc.Get("Url");
            var content = await this._contentService.GetContent(url);
            results.Add(new SearchResult
            {
                Url = url,
                PreviewText = doc.Get("Body"),
                Score = item.Score,
                Content = content
            });
        }
    }
    return results;
}
public SearchResults Search(string queryString)
{
    int resultsPerPage = 10;
    List<FieldDefinition> fields = new List<FieldDefinition>
    {
        new FieldDefinition { Name = "title", IsDefault = true },
        new FieldDefinition { Name = "rating" },
        new FieldDefinition { Name = "description" }
    };
    Query query = BuildQuery(queryString, fields);
    searcherManager.MaybeRefreshBlocking();
    IndexSearcher searcher = searcherManager.Acquire();
    try
    {
        TopDocs topDocs = searcher.Search(query, resultsPerPage);
        if (topDocs.TotalHits < 1)
        {
            Console.WriteLine("No result found with TermQuery, Calling Prefix Query");
            query = BuildQuery(queryString, fields, true);
            topDocs = searcher.Search(query, resultsPerPage);
        }
        if (topDocs.TotalHits < 1)
        {
            Console.WriteLine("No result found with PrefixQuery, Calling Fuzzy Query");
            query = BuildQuery(queryString, fields, true, true);
            topDocs = searcher.Search(query, resultsPerPage);
        }
        return CompileResults(searcher, topDocs);
    }
    finally
    {
        searcherManager.Release(searcher);
        searcher = null;
    }
}
/// <summary>
/// Retrieve suggestions, specifying whether all terms
/// must match (<paramref name="allTermsRequired"/>) and whether the hits
/// should be highlighted (<paramref name="doHighlight"/>).
/// </summary>
public virtual IList<LookupResult> DoLookup(string key, IEnumerable<BytesRef> contexts, int num, bool allTermsRequired, bool doHighlight)
{
    if (m_searcherMgr == null)
    {
        throw new InvalidOperationException("suggester was not built");
    }

    Occur occur;
    if (allTermsRequired)
    {
        occur = Occur.MUST;
    }
    else
    {
        occur = Occur.SHOULD;
    }

    TokenStream ts = null;
    BooleanQuery query;
    var matchedTokens = new HashSet<string>();
    string prefixToken = null;

    try
    {
        ts = m_queryAnalyzer.GetTokenStream("", new StringReader(key));
        //long t0 = System.currentTimeMillis();
        ts.Reset();
        var termAtt = ts.AddAttribute<ICharTermAttribute>();
        var offsetAtt = ts.AddAttribute<IOffsetAttribute>();
        string lastToken = null;
        query = new BooleanQuery();
        int maxEndOffset = -1;
        matchedTokens = new HashSet<string>();
        while (ts.IncrementToken())
        {
            if (lastToken != null)
            {
                matchedTokens.Add(lastToken);
                query.Add(new TermQuery(new Term(TEXT_FIELD_NAME, lastToken)), occur);
            }
            lastToken = termAtt.ToString();
            if (lastToken != null)
            {
                maxEndOffset = Math.Max(maxEndOffset, offsetAtt.EndOffset);
            }
        }
        ts.End();

        if (lastToken != null)
        {
            Query lastQuery;
            if (maxEndOffset == offsetAtt.EndOffset)
            {
                // Use PrefixQuery (or the ngram equivalent) when
                // there was no trailing discarded chars in the
                // string (e.g. whitespace), so that if query does
                // not end with a space we show prefix matches for
                // that token:
                lastQuery = GetLastTokenQuery(lastToken);
                prefixToken = lastToken;
            }
            else
            {
                // Use TermQuery for an exact match if there were
                // trailing discarded chars (e.g. whitespace), so
                // that if query ends with a space we only show
                // exact matches for that term:
                matchedTokens.Add(lastToken);
                lastQuery = new TermQuery(new Term(TEXT_FIELD_NAME, lastToken));
            }
            if (lastQuery != null)
            {
                query.Add(lastQuery, occur);
            }
        }

        if (contexts != null)
        {
            BooleanQuery sub = new BooleanQuery();
            query.Add(sub, Occur.MUST);
            foreach (BytesRef context in contexts)
            {
                // NOTE: we "should" wrap this in
                // ConstantScoreQuery, or maybe send this as a
                // Filter instead to search, but since all of
                // these are MUST'd, the change to the score won't
                // affect the overall ranking. Since we indexed
                // as DOCS_ONLY, the perf should be the same
                // either way (no freq int[] blocks to decode):

                // TODO: if we had a BinaryTermField we could fix
                // this "must be valid ut8f" limitation:
                sub.Add(new TermQuery(new Term(CONTEXTS_FIELD_NAME, context.Utf8ToString())), Occur.SHOULD);
            }
        }
    }
    finally
    {
        IOUtils.CloseWhileHandlingException(ts);
    }

    // TODO: we could allow blended sort here, combining
    // weight w/ score. Now we ignore score and sort only
    // by weight:
    Query finalQuery = FinishQuery(query, allTermsRequired);

    //System.out.println("finalQuery=" + query);

    // Sort by weight, descending:
    TopFieldCollector c = TopFieldCollector.Create(SORT, num, true, false, false, false);

    // We sorted postings by weight during indexing, so we
    // only retrieve the first num hits now:
    ICollector c2 = new EarlyTerminatingSortingCollector(c, SORT, num);
    IndexSearcher searcher = m_searcherMgr.Acquire();
    IList<LookupResult> results = null;
    try
    {
        //System.out.println("got searcher=" + searcher);
        searcher.Search(finalQuery, c2);

        TopFieldDocs hits = (TopFieldDocs)c.GetTopDocs();

        // Slower way if postings are not pre-sorted by weight:
        // hits = searcher.search(query, null, num, SORT);
        results = CreateResults(searcher, hits, num, key, doHighlight, matchedTokens, prefixToken);
    }
    finally
    {
        m_searcherMgr.Release(searcher);
    }

    //System.out.println((System.currentTimeMillis() - t0) + " msec for infix suggest");
    //System.out.println(results);

    return results;
}
public virtual void TestReferenceDecrementIllegally()
{
    Directory dir = NewDirectory();
    IndexWriter writer = new IndexWriter(dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
            .SetMergeScheduler(new ConcurrentMergeScheduler()));
    SearcherManager sm = new SearcherManager(writer, false, new SearcherFactory());
    writer.AddDocument(new Document());
    writer.Commit();
    sm.MaybeRefreshBlocking();

    IndexSearcher acquire = sm.Acquire();
    IndexSearcher acquire2 = sm.Acquire();
    sm.Release(acquire);
    sm.Release(acquire2);

    acquire = sm.Acquire();
    acquire.IndexReader.DecRef();
    sm.Release(acquire);
    try
    {
        sm.Acquire();
        Assert.Fail("acquire should have thrown an InvalidOperationException since we modified the refCount outside of the manager");
    }
    catch (InvalidOperationException ex)
    {
        //
    }

    // sm.Dispose(); -- already closed
    writer.Dispose();
    dir.Dispose();
}
public virtual void TestReferenceDecrementIllegally([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")] IConcurrentMergeScheduler scheduler)
{
    Directory dir = NewDirectory();
    var config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
        .SetMergeScheduler(scheduler);
    IndexWriter writer = new IndexWriter(dir, config);
    SearcherManager sm = new SearcherManager(writer, false, new SearcherFactory());
    writer.AddDocument(new Document());
    writer.Commit();
    sm.MaybeRefreshBlocking();

    IndexSearcher acquire = sm.Acquire();
    IndexSearcher acquire2 = sm.Acquire();
    sm.Release(acquire);
    sm.Release(acquire2);

    acquire = sm.Acquire();
    acquire.IndexReader.DecRef();
    sm.Release(acquire);

    Assert.Throws<InvalidOperationException>(
        () => sm.Acquire(),
        "acquire should have thrown an InvalidOperationException since we modified the refCount outside of the manager");

    // sm.Dispose(); -- already closed
    writer.Dispose();
    dir.Dispose();
}
public async Task<IEnumerable<SearchResult>> Search(string searchTerm)
{
    var results = new List<SearchResult>();
    var queryParser = SetupQueryParser(SetupAnalyzer());
    var query = queryParser.Parse(searchTerm);
    ScoreDoc[] scored;

    //using (Directory directory = FSDirectory.Open(this._directory))
    //{
    //    using (var writer = new IndexWriter(directory,
    //        new IndexWriterConfig(LuceneVersion.LUCENE_CURRENT, SetupAnalyzer())))
    //    {
    //        using (var reader = writer.GetReader(false))
    //        {
    //            for (int i = 0; i < reader.MaxDoc; i++)
    //            {
    //                Document doc = reader.Document(i);
    //                string type = doc.GetField("Type").GetStringValue();
    //                if (type == "content")
    //                {
    //                }
    //                // do something with docId here...
    //            }
    //        }
    //    }
    //}

    using (Directory directory = FSDirectory.Open(this._directory))
    {
        SearcherManager searchManager = new SearcherManager(directory, null);
        searchManager.MaybeRefreshBlocking();
        IndexSearcher indexSearcher = searchManager.Acquire();
        TopDocs hits = indexSearcher.Search(query, int.MaxValue);
        scored = hits.ScoreDocs;
        foreach (var item in scored)
        {
            Document doc = indexSearcher.Doc(item.Doc);
            var previewText = GeneratePreviewText(query, doc.Get("Body"));
            string url = doc.Get("Url");
            var content = await this._contentService.GetContent(url);
            results.Add(new SearchResult
            {
                Url = url,
                PreviewText = doc.Get("Body"),
                Score = item.Score,
                Content = content
            });
        }
    }
    return results;
}
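// Verifies that Acquire() and MaybeRefresh() throw AlreadyClosedException once the manager
// has been disposed, while releasing a previously acquired searcher still succeeds.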
public virtual void TestEnsureOpen()
{
    Directory dir = NewDirectory();
    (new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null))).Dispose();
    SearcherManager sm = new SearcherManager(dir, null);
    IndexSearcher s = sm.Acquire();
    sm.Dispose();

    // this should succeed;
    sm.Release(s);

    try
    {
        // this should fail
        sm.Acquire();
    }
    catch (AlreadyClosedException e)
    {
        // ok
    }

    try
    {
        // this should fail
        sm.MaybeRefresh();
    }
    catch (AlreadyClosedException e)
    {
        // ok
    }
    dir.Dispose();
}
/*
 * LUCENE-3528 - NRTManager hangs in certain situations
 */
public virtual void TestThreadStarvationNoDeleteNRTReader()
{
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
    Directory d = NewDirectory();
    CountDownLatch latch = new CountDownLatch(1);
    CountDownLatch signal = new CountDownLatch(1);

    LatchedIndexWriter _writer = new LatchedIndexWriter(d, conf, latch, signal);
    TrackingIndexWriter writer = new TrackingIndexWriter(_writer);
    SearcherManager manager = new SearcherManager(_writer, false, null);
    Document doc = new Document();
    doc.Add(NewTextField("test", "test", Field.Store.YES));
    writer.AddDocument(doc);
    manager.MaybeRefresh();
    ThreadClass t = new ThreadAnonymousInnerClassHelper(this, latch, signal, writer, manager);
    t.Start();
    _writer.WaitAfterUpdate = true; // wait in addDocument to let some reopens go through

    long lastGen = writer.UpdateDocument(new Term("foo", "bar"), doc); // once this returns the doc is already reflected in the last reopen

    Assert.IsFalse(manager.SearcherCurrent); // false since there is a delete in the queue

    IndexSearcher searcher = manager.Acquire();
    try
    {
        Assert.AreEqual(2, searcher.IndexReader.NumDocs());
    }
    finally
    {
        manager.Release(searcher);
    }

    ControlledRealTimeReopenThread<IndexSearcher> thread = new ControlledRealTimeReopenThread<IndexSearcher>(writer, manager, 0.01, 0.01);
    thread.Start(); // start reopening
    if (VERBOSE)
    {
        Console.WriteLine("waiting now for generation " + lastGen);
    }

    AtomicBoolean finished = new AtomicBoolean(false);
    ThreadClass waiter = new ThreadAnonymousInnerClassHelper2(this, lastGen, thread, finished);
    waiter.Start();
    manager.MaybeRefresh();
    waiter.Join(1000);
    if (!finished.Get())
    {
        waiter.Interrupt();
        Assert.Fail("thread deadlocked on waitForGeneration");
    }
    thread.Dispose();
    thread.Join();
    IOUtils.Close(manager, _writer, d);
}
// LUCENE-5461
public virtual void TestCRTReopen()
{
    //test behaving badly

    //should be high enough
    int maxStaleSecs = 20;

    //build crap data just to store it.
    string s = " abcdefghijklmnopqrstuvwxyz ";
    char[] chars = s.ToCharArray();
    StringBuilder builder = new StringBuilder(2048);
    for (int i = 0; i < 2048; i++)
    {
        builder.Append(chars[Random().Next(chars.Length)]);
    }
    string content = builder.ToString();

    SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    Directory dir = new NRTCachingDirectory(NewFSDirectory(CreateTempDir("nrt")), 5, 128);
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_46, new MockAnalyzer(Random()));
    config.SetIndexDeletionPolicy(sdp);
    config.SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE_OR_APPEND);
    IndexWriter iw = new IndexWriter(dir, config);
    SearcherManager sm = new SearcherManager(iw, true, new SearcherFactory());
    TrackingIndexWriter tiw = new TrackingIndexWriter(iw);
    ControlledRealTimeReopenThread<IndexSearcher> controlledRealTimeReopenThread =
        new ControlledRealTimeReopenThread<IndexSearcher>(tiw, sm, maxStaleSecs, 0);

    controlledRealTimeReopenThread.SetDaemon(true);
    controlledRealTimeReopenThread.Start();

    IList<Thread> commitThreads = new List<Thread>();

    for (int i = 0; i < 500; i++)
    {
        if (i > 0 && i % 50 == 0)
        {
            Thread commitThread = new Thread(new RunnableAnonymousInnerClassHelper(this, sdp, dir, iw));
            commitThread.Start();
            commitThreads.Add(commitThread);
        }
        Document d = new Document();
        d.Add(new TextField("count", i + "", Field.Store.NO));
        d.Add(new TextField("content", content, Field.Store.YES));
        long start = DateTime.Now.Millisecond;
        long l = tiw.AddDocument(d);
        controlledRealTimeReopenThread.WaitForGeneration(l);
        long wait = DateTime.Now.Millisecond - start;
        Assert.IsTrue(wait < (maxStaleSecs * 1000), "waited too long for generation " + wait);
        IndexSearcher searcher = sm.Acquire();
        TopDocs td = searcher.Search(new TermQuery(new Term("count", i + "")), 10);
        sm.Release(searcher);
        Assert.AreEqual(1, td.TotalHits);
    }

    foreach (Thread commitThread in commitThreads)
    {
        commitThread.Join();
    }

    controlledRealTimeReopenThread.Dispose();
    sm.Dispose();
    iw.Dispose();
    dir.Dispose();
}
public IList<LuceneSearchResult> Search(string term, int numResults, AdminUser user, string sortField, bool descending)
{
    using (SearcherManager manager = new SearcherManager(RequestIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        Query query = null;
        if (string.IsNullOrEmpty(term))
        {
            query = new MatchAllDocsQuery();
        }
        else
        {
            QueryParser parser;
            if (term.Trim().StartsWith("Id"))
            {
                // Single and ranged numeric value search on Id field
                parser = new NumericQueryParser(Lucene.Net.Util.Version.LUCENE_30, "Id", new KeywordAnalyzer());
            }
            else
            {
                // General search across text fields
                parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30,
                    new string[] { "ReferenceNumber", "RequestName", "RequestEntity", "RequestType", "CurrentStatus" },
                    new LowerCaseAnalyzer());
                parser.DefaultOperator = QueryParser.Operator.AND;
                if (!string.IsNullOrEmpty(term))
                {
                    if (!term.Contains(':'))
                    {
                        // Edit user's search string and add wildcards.
                        term = string.Join(" ", term.Split(new string[] { " " }, System.StringSplitOptions.RemoveEmptyEntries)
                            .Select(x => "*" + x + "*")
                            .ToArray());
                    }
                }
            }
            parser.AllowLeadingWildcard = true;
            try
            {
                query = parser.Parse(term);
            }
            catch (ParseException e)
            {
                log.Error("Encountered problem parsing the search term: " + term, e);
                return new List<LuceneSearchResult>();
            }
        }

        // when given a user, assumes results must be filtered
        if (user != null && user.GetRequestEntity() != null)
        {
            BooleanQuery bq = new BooleanQuery();
            bq.Add(query, Occur.MUST);
            BooleanQuery bq2 = new BooleanQuery();
            bq2.Add(new TermQuery(new Term("RequestEntity", user.GetRequestEntity().RequestEntityName)), Occur.SHOULD);
            bq2.Add(new TermQuery(new Term("CreatorRequestEntity", user.GetRequestEntity().RequestEntityName)), Occur.SHOULD);
            bq2.Add(new TermQuery(new Term("Creator", user.UserID)), Occur.SHOULD);
            bq.Add(bq2, Occur.MUST);
            query = bq;
        }
        log.Debug("Search query: " + query.ToString());
        this.PerformSearch(query, numResults, sortField, descending);
        return TransformTopDocs();
    }
}
public IList<LuceneSearchResult> Search(string term, DateTime? start, DateTime? end, int numResults, string sortField, bool descending)
{
    using (SearcherManager manager = new SearcherManager(EventIndexWriterSingleton.Instance))
    {
        this.searcher = manager.Acquire().Searcher;
        QueryParser parser;
        if (!string.IsNullOrEmpty(term) && term.Trim().StartsWith("Id"))
        {
            // Single and ranged numeric value search on Id field
            parser = new NumericQueryParser(Lucene.Net.Util.Version.LUCENE_30, "Id", new KeywordAnalyzer());
        }
        else
        {
            // General search across text fields
            parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30,
                new string[] { "Name", "JhroCaseNumber", "Violation", "NarrativeEn", "NarrativeFr", "Location", "Notes", "StartDateDisplay", "EndDateDisplay" },
                new LowerCaseAnalyzer());
            parser.DefaultOperator = QueryParser.Operator.AND;
            if (!string.IsNullOrEmpty(term))
            {
                if (!term.Contains(':'))
                {
                    // Edit user's search string and add wildcards.
                    term = string.Join(" ", term.Split(new string[] { " " }, System.StringSplitOptions.RemoveEmptyEntries)
                        .Select(x => "*" + x + "*")
                        .ToArray());
                }
            }
        }
        parser.AllowLeadingWildcard = true;
        try
        {
            Query query = null;
            if (!string.IsNullOrEmpty(term))
            {
                query = parser.Parse(term);
            }
            if (start.HasValue || end.HasValue)
            {
                long? min = null;
                if (start.HasValue)
                {
                    min = start.Value.Ticks;
                }
                long? max = null;
                if (end.HasValue)
                {
                    max = end.Value.Ticks;
                }
                BooleanQuery bq = new BooleanQuery();
                if (query != null)
                {
                    bq.Add(query, Occur.MUST);
                }
                BooleanQuery bq2 = new BooleanQuery();
                bq2.Add(NumericRangeQuery.NewLongRange("StartDateSearch", min, max, true, true), Occur.SHOULD);
                bq2.Add(NumericRangeQuery.NewLongRange("EndDateSearch", min, max, true, true), Occur.SHOULD);
                bq.Add(bq2, Occur.MUST);
                query = bq;
            }
            log.Debug("Search query: " + query.ToString());
            this.PerformSearch(query, numResults, sortField, descending);
            return TransformTopDocs();
        }
        catch (ParseException e)
        {
            log.Error("Encountered problem parsing the search term: " + term, e);
            return new List<LuceneSearchResult>();
        }
    }
}