public ActionResult Search(string query)
{
    ViewData["Message"] = "query : " + query;

    var searcher = new IndexSearcher(
        new Lucene.Net.Store.SimpleFSDirectory(new DirectoryInfo(Configuration.IndexDirectory)),
        readOnly: true);

    var fieldsToSearchIn = new[] { Configuration.Fields.Name, Configuration.Fields.Description };
    var queryAnalyzer = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fieldsToSearchIn, new BrazilianAnalyzer());

    var numberOfResults = 10;
    var top10Results = searcher.Search(queryAnalyzer.Parse(query), numberOfResults);

    var docs = new List<DocumentViewModel>();
    foreach (var scoreDoc in top10Results.scoreDocs)
    {
        var document = searcher.Doc(scoreDoc.doc);
        var name = document.GetField(Configuration.Fields.Name).StringValue();
        var description = document.GetField(Configuration.Fields.Description).StringValue();
        var link = document.GetField(Configuration.Fields.Link).StringValue();
        docs.Add(new DocumentViewModel(name, description, link));
    }

    return View(new SearchViewModel(docs));
}
public SearchResults Search(DocumentRoot root, string term)
{
    var results = new SearchResults();
    var indexPath = _settings.GetSearchIndexPath();
    var version = Lucene.Net.Util.Version.LUCENE_30;

    using (var directory = FSDirectory.Open(new DirectoryInfo(indexPath)))
    using (var indexReader = IndexReader.Open(directory, true))
    using (var indexSearch = new IndexSearcher(indexReader))
    {
        var analyzer = new StandardAnalyzer(version);
        var queryParser = new MultiFieldQueryParser(version, new[] { "Title", "Body" }, analyzer);
        var query = queryParser.Parse(term);

        var resultDocs = indexSearch.Search(query, indexReader.MaxDoc);
        var hits = resultDocs.ScoreDocs;

        foreach (var hit in hits)
        {
            var doc = indexSearch.Doc(hit.Doc);
            results.Documents.Add(new SearchResult
            {
                Score = hit.Score,
                Snippet = doc.Get("Snippet"),
                Title = doc.Get("Title")
            });
        }
    }

    return results;
}
private static Query ParseQuery(string searchTerm)
{
    var fields = new Dictionary<string, float>
    {
        { "Id", 1.2f },
        { "Title", 1.0f },
        { "Tags", 1.0f },
        { "Description", 0.8f },
        { "Author", 0.6f }
    };

    var analyzer = new StandardAnalyzer(LuceneCommon.LuceneVersion);
    searchTerm = QueryParser.Escape(searchTerm).ToLowerInvariant();
    var queryParser = new MultiFieldQueryParser(LuceneCommon.LuceneVersion, fields.Keys.ToArray(), analyzer, fields);

    var conjunctionQuery = new BooleanQuery();
    conjunctionQuery.SetBoost(1.5f);
    var disjunctionQuery = new BooleanQuery();
    var wildCardQuery = new BooleanQuery();
    wildCardQuery.SetBoost(0.7f);
    var exactIdQuery = new TermQuery(new Term("Id-Exact", searchTerm));
    exactIdQuery.SetBoost(2.5f);

    foreach (var term in searchTerm.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries))
    {
        conjunctionQuery.Add(queryParser.Parse(term), BooleanClause.Occur.MUST);
        disjunctionQuery.Add(queryParser.Parse(term), BooleanClause.Occur.SHOULD);

        foreach (var field in fields)
        {
            var wildCardTermQuery = new WildcardQuery(new Term(field.Key, term + "*"));
            wildCardTermQuery.SetBoost(0.7f * field.Value);
            wildCardQuery.Add(wildCardTermQuery, BooleanClause.Occur.SHOULD);
        }
    }

    return conjunctionQuery.Combine(new Query[] { exactIdQuery, conjunctionQuery, disjunctionQuery, wildCardQuery });
}
public IList<MedicalConsultory> Search(string searchText)
{
    if (string.IsNullOrWhiteSpace(searchText))
        return new List<MedicalConsultory>();

    using (var searcher = new IndexSearcher(_indexDirectoryPath, true))
    {
        var analyzer = new StandardAnalyzer(_version);

        // Multi field search
        var query = new BooleanQuery();
        var parser = new MultiFieldQueryParser(_version, new[] { "SpecialtyName", "City" }, analyzer);
        searchText.RemoveIrrelevantTerms()
            .Split(' ')
            .ToList()
            .ForEach(word => { query.Add(parser.Parse(word), Occur.SHOULD); });

        var hits = searcher.Search(query, null, searcher.MaxDoc, Sort.RELEVANCE).ScoreDocs;

        // Simple field search
        //var parser = new QueryParser(Version.LUCENE_30, "SpecialtyName", analyzer);
        //var query = parser.Parse(searchText.Trim());
        //var hits = searcher.Search(query, searcher.MaxDoc).ScoreDocs;

        var results = hits.Select(hit => MapMedicalConsultory(hit, searcher)).ToList();

        analyzer.Close();
        return results;
    }
}
public Query Get(ProductSearchQuery searchQuery)
{
    var booleanQuery = new BooleanQuery { ProductSearchPublishedDefinition.PublishedOnly };

    if (!searchQuery.Options.Any() && !searchQuery.Specifications.Any() &&
        Math.Abs(searchQuery.PriceFrom - 0) < 0.01 && !searchQuery.PriceTo.HasValue &&
        !searchQuery.CategoryId.HasValue && string.IsNullOrWhiteSpace(searchQuery.SearchTerm) &&
        !searchQuery.BrandId.HasValue)
    {
        return booleanQuery;
    }

    if (searchQuery.Options.Any())
        booleanQuery.Add(GetOptionsQuery(searchQuery.Options), Occur.MUST);
    if (searchQuery.Specifications.Any())
        booleanQuery.Add(GetSpecificationsQuery(searchQuery.Specifications), Occur.MUST);
    if (searchQuery.CategoryId.HasValue)
        booleanQuery.Add(GetCategoriesQuery(searchQuery.CategoryId.Value), Occur.MUST);
    if (searchQuery.PriceFrom > 0 || searchQuery.PriceTo.HasValue)
        booleanQuery.Add(GetPriceRangeQuery(searchQuery), Occur.MUST);

    if (!String.IsNullOrWhiteSpace(searchQuery.SearchTerm))
    {
        IndexDefinition indexDefinition = IndexingHelper.Get<ProductSearchIndex>();
        Analyzer analyser = indexDefinition.GetAnalyser();
        var parser = new MultiFieldQueryParser(Version.LUCENE_30, indexDefinition.SearchableFieldNames, analyser);
        Query query = searchQuery.SearchTerm.SafeGetSearchQuery(parser, analyser);
        booleanQuery.Add(query, Occur.MUST);
    }

    if (searchQuery.BrandId.HasValue)
        booleanQuery.Add(GetBrandQuery(searchQuery.BrandId.Value), Occur.MUST);

    return booleanQuery;
}
public List<Advert> SearchAds(string query)
{
    if (_searcher == null)
        return null;

    ICollection fields = _searcher.Reader.GetFieldNames(IndexReader.FieldOption.ALL);
    List<string> fldList = new List<string>();
    foreach (DictionaryEntry f in fields)
    {
        fldList.Add(f.Key.ToString());
    }

    List<Advert> adverts = new List<Advert>();
    MultiFieldQueryParser parser = new MultiFieldQueryParser(fldList.ToArray(), _analyzer);
    Query q = parser.Parse(query);
    Hits hits = _searcher.Search(q);

    PropertyDescriptors desc = new PropertyDescriptors();
    desc.LoadData(System.Windows.Forms.Application.StartupPath + "\\PropertyDescriptors.xml");

    for (int i = 0; i < hits.Length(); i++)
    {
        Advert ad = new Advert();
        Document doc = hits.Doc(i);
        foreach (Field f in doc.Fields())
        {
            string temp = desc.GetDisplayableFormat(f.Name(), f.StringValue());
            ad[f.Name()] = temp;
        }
        adverts.Add(ad);
    }

    return adverts;
}
public ProjectData[] Search(string searchTerm)
{
    IndexSearcher searcher = new IndexSearcher(luceneIndexDirectory);
    IntegralCollector searcherCollector = new IntegralCollector();

    // Setup the fields to search through
    string[] searchfields = new string[] { "name", "vessel" };

    // Build our booleanquery that will be a combination of all the queries for each individual search term
    var finalQuery = new BooleanQuery();
    var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, searchfields, analyzer);

    // Split the search string into separate search terms by word
    string[] terms = searchTerm.Split(new[] { " " }, StringSplitOptions.RemoveEmptyEntries);
    foreach (string term in terms)
        finalQuery.Add(parser.Parse(term.Replace("~", "") + "~"), Occur.SHOULD);

    searcher.Search(finalQuery, searcherCollector);

    var results = new ProjectData[searcherCollector.Docs.Count];
    for (int i = 0; i < searcherCollector.Docs.Count; i++)
    {
        var doc = searcher.Doc(searcherCollector.Docs[i]);
        results[i] = new ProjectData(doc.Get("name"), doc.Get("vessel"));
    }

    return results;
}
public Query GetQuery()
{
    var booleanQuery = new BooleanQuery
    {
        {
            new TermRangeQuery(
                FieldDefinition.GetFieldName<PublishedOnFieldDefinition>(),
                null,
                DateTools.DateToString(CurrentRequestData.Now, DateTools.Resolution.SECOND),
                false,
                true),
            Occur.MUST
        }
    };

    if (!String.IsNullOrWhiteSpace(Term))
    {
        var indexDefinition = IndexingHelper.Get<WebpageSearchIndexDefinition>();
        var analyser = indexDefinition.GetAnalyser();
        var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, indexDefinition.SearchableFieldNames, analyser);
        Query query = Term.SafeGetSearchQuery(parser, analyser);
        booleanQuery.Add(query, Occur.MUST);
    }

    if (CreatedOnFrom.HasValue || CreatedOnTo.HasValue)
        booleanQuery.Add(GetDateQuery(), Occur.MUST);

    if (!string.IsNullOrEmpty(Type))
        booleanQuery.Add(new TermQuery(new Term(FieldDefinition.GetFieldName<TypeFieldDefinition>(), Type)), Occur.MUST);

    if (Parent != null)
        booleanQuery.Add(
            new TermQuery(new Term(FieldDefinition.GetFieldName<ParentIdFieldDefinition>(), Parent.Id.ToString())),
            Occur.MUST);

    return booleanQuery;
}
public Query GetQuery()
{
    if (String.IsNullOrWhiteSpace(Term) && String.IsNullOrWhiteSpace(Type) &&
        !CreatedOnTo.HasValue && !CreatedOnFrom.HasValue && Parent == null)
        return new MatchAllDocsQuery();

    var booleanQuery = new BooleanQuery();

    if (!String.IsNullOrWhiteSpace(Term))
    {
        var indexDefinition = IndexingHelper.Get<AdminWebpageIndexDefinition>();
        var analyser = indexDefinition.GetAnalyser();
        var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, indexDefinition.SearchableFieldNames, analyser);
        Query query = Term.SafeGetSearchQuery(parser, analyser);
        booleanQuery.Add(query, Occur.MUST);
    }

    if (CreatedOnFrom.HasValue || CreatedOnTo.HasValue)
        booleanQuery.Add(GetDateQuery(), Occur.MUST);

    if (!string.IsNullOrEmpty(Type))
        booleanQuery.Add(new TermQuery(new Term(FieldDefinition.GetFieldName<TypeFieldDefinition>(), Type)), Occur.MUST);

    if (Parent != null)
        booleanQuery.Add(
            new TermQuery(new Term(FieldDefinition.GetFieldName<ParentIdFieldDefinition>(), Parent.Id.ToString())),
            Occur.MUST);

    return booleanQuery;
}
public static SearchResults Query(HttpContext ctx, string query, int page = 1)
{
    if (string.IsNullOrWhiteSpace(query))
    {
        return new SearchResults { Documents = new SearchResult[0], TotalCount = 0 };
    }

    var indexPath = ctx.Server.MapPath("~/App_Data/Index");
    var indexSearcher = new DirectoryIndexSearcher(new DirectoryInfo(indexPath));

    using (var searchService = new SearchService(indexSearcher))
    {
        var parser = new MultiFieldQueryParser(
            Lucene.Net.Util.Version.LUCENE_29,
            new[] { "Text" },
            new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29));

        Query multiQuery = GetSafeQuery(parser, query);
        var result = searchService.SearchIndex(multiQuery);

        return new SearchResults
        {
            Documents = result.Results
                .Skip(PageSize * (page - 1))
                .Take(PageSize)
                .Select(d => new SearchResult
                {
                    Url = d.Get("Url"),
                    Title = d.Get("Title"),
                    Summary = d.Get("Summary")
                }),
            TotalCount = result.Results.Count()
        };
    }
}
public Data searchLucene(Data data)
{
    Search_gl search = new Search_gl();
    List<string> item = new List<string>();

    Lucene.Net.Store.Directory directory =
        FSDirectory.Open(new DirectoryInfo(Environment.CurrentDirectory + "\\LuceneIndex"));
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);

    //QueryParser queryParser = new QueryParser(Version.LUCENE_29, "summary", analyzer); //search for single field
    MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_29, new string[] { "name", "summary" }, analyzer); //search for multifield

    Query query = parser.Parse((data.getString("search")) + "*"); //can't search blank text with wildcard as first character

    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
    searcher.Search(query, collector);
    ScoreDoc[] hits = collector.TopDocs().ScoreDocs;

    int count = hits.Length;
    for (int i = 0; i < count; i++)
    {
        int docId = hits[i].doc;
        float score = hits[i].score;
        Document doc = searcher.Doc(docId);
        string id = doc.Get("id");
        item.Add(id);
    }

    Data list = search.search(data, item.ToArray());
    reader.Close();
    searcher.Close();
    return list;
}
IEnumerable<string> Search(string textToFind)
{
    var reader = IndexReader.Open(_ramDirectory, true);
    var searcher = new IndexSearcher(reader);
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    var parser = new MultiFieldQueryParser(Version.LUCENE_29, _searchFields, analyzer);

    var query = parser.Parse(textToFind);
    var collector = TopScoreDocCollector.create(100, true);
    searcher.Search(query, collector);
    var hits = collector.TopDocs().scoreDocs;

    var foundKeys = new List<string>();
    foreach (ScoreDoc scoreDoc in hits)
    {
        var document = searcher.Doc(scoreDoc.doc);
        var key = document.Get(_keyField);
        if (key != null && !foundKeys.Contains(key))
        {
            foundKeys.Add(key);
        }
    }

    reader.Close();
    searcher.Close();
    analyzer.Close();
    return foundKeys;
}
// main search method
private static IEnumerable<SampleData> _search(string searchQuery, string searchField = "")
{
    // validation
    if (string.IsNullOrEmpty(searchQuery.Replace("*", "").Replace("?", "")))
        return new List<SampleData>();

    // set up lucene searcher
    using (var searcher = new IndexSearcher(_directory, false))
    {
        var hits_limit = 1000;
        var analyzer = new StandardAnalyzer(Version.LUCENE_29);

        // search by single field
        if (!string.IsNullOrEmpty(searchField))
        {
            var parser = new QueryParser(Version.LUCENE_29, searchField, analyzer);
            var query = parseQuery(searchQuery, parser);
            var hits = searcher.Search(query, hits_limit).ScoreDocs;
            var results = _mapLuceneToDataList(hits, searcher);
            analyzer.Close();
            searcher.Close();
            searcher.Dispose();
            return results;
        }
        // search by multiple fields (ordered by RELEVANCE)
        else
        {
            var parser = new MultiFieldQueryParser(Version.LUCENE_29, new[] { "Id", "Name", "Description" }, analyzer);
            var query = parseQuery(searchQuery, parser);
            var hits = searcher.Search(query, null, hits_limit, Sort.INDEXORDER).ScoreDocs;
            var results = _mapLuceneToDataList(hits, searcher);
            analyzer.Close();
            searcher.Close();
            searcher.Dispose();
            return results;
        }
    }
}
public override List<ISearchEntity> GetSearchResult(out int MatchCount)
{
    Analyzer analyzer = new StandardAnalyzer();
    IndexSearcher searcher = new IndexSearcher(searchInfo.ConfigElement.IndexDirectory);
    MultiFieldQueryParser parserName = new MultiFieldQueryParser(new string[] { "title", "content", "keywords" }, analyzer);
    Query queryName = parserName.Parse(searchInfo.QueryString);
    Hits hits = searcher.Search(queryName);

    List<ISearchEntity> ResultList = new List<ISearchEntity>();
    for (int i = 0; i < hits.Length(); i++)
    {
        Document doc = hits.Doc(i);
        ResultList.Add((ISearchEntity)new NewsModel()
        {
            EntityIdentity = Convert.ToInt32(doc.Get("newsid")),
            Title = Convert.ToString(doc.Get("title")),
            Content = Convert.ToString(doc.Get("content")),
            Keywords = doc.Get("keywords")
        });
    }

    searcher.Close();
    MatchCount = hits.Length();
    return ResultList;
}
public Data searchLucene(Data data)
{
    Account_lg account = new Account_lg();
    List<string> item = new List<string>();

    Lucene.Net.Store.Directory directory =
        FSDirectory.Open(new DirectoryInfo("C:\\Visual Studio 2010\\Transaction" + "\\LuceneIndex"));
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);

    MultiFieldQueryParser parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29,
        new string[] { "name", "username" }, analyzer); //search for multifield

    Query query = parser.Parse((data.getString("search")) + "*"); //can't search blank text with wildcard as first character

    TopScoreDocCollector collector = TopScoreDocCollector.Create(1000, true);
    searcher.Search(query, collector);
    ScoreDoc[] hits = collector.TopDocs().ScoreDocs;

    int count = hits.Length;
    for (int i = 0; i < count; i++)
    {
        int docId = hits[i].Doc;
        float score = hits[i].Score;
        Document doc = searcher.Doc(docId);
        string id = doc.Get("id");
        item.Add(id);
    }

    Data list = account.selectUser(data, item.ToArray());
    reader.Dispose();
    searcher.Dispose();
    return list;
}
public void RemoveFromIndex(string[] keys, string value)
{
    var analyzer = new AccentedAnalyzer();
    var query = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, keys, analyzer);
    var directory = FSDirectory.Open(System.IO.Directory.GetParent(IndexPath));
    IndexReader idxReader = IndexReader.Open(indexPath);
    var searcher = new IndexSearcher(directory, true);

    query.SetDefaultOperator(QueryParser.Operator.AND);
    var q = query.Parse(value);

    int top = idxReader.MaxDoc();
    var results = TopScoreDocCollector.create(top, true);
    searcher.Search(q, results);
    ScoreDoc[] hits = results.TopDocs().scoreDocs;

    Document[] documents = new Document[hits.Length];
    IndexReader indexReader = null;
    for (int i = 0; i < hits.Length; i++)
    {
        int docId = hits[i].doc;
        indexReader = IndexReader.Open(directory, false);
        indexReader.DeleteDocument(docId);
        indexReader.Commit();
        indexReader.Flush();
        indexReader.Close();
    }

    searcher.Close();
    directory.Close();
}
/// <summary>
/// Performs the explanation.
/// </summary>
/// <param name="luceneVersion">The lucene version.</param>
/// <param name="fsDirectory">The fs directory.</param>
/// <param name="searchQuery">The search query.</param>
/// <param name="resultId">The result identifier.</param>
/// <returns></returns>
protected virtual string PerformExplain(Version luceneVersion, FSDirectory fsDirectory, string searchQuery, int resultId)
{
    /*
     * The obvious problem here is that we're not using the exact same search as the real one.
     */
    var explanation = string.Empty;

    using (var indexSearcher = new IndexSearcher(fsDirectory, false))
    {
        var analyzer = new StandardAnalyzer(luceneVersion);
        var queryParser = new MultiFieldQueryParser(luceneVersion, new[] { "Id".ToLowerInvariant() }, analyzer)
        {
            DefaultOperator = QueryParser.Operator.AND
        };
        var query = this.searchQueryParser.ParseQuery(searchQuery, queryParser);

        explanation = indexSearcher.Explain(query, resultId).ToHtml();
        analyzer.Close();
    }

    return explanation;
}
public virtual void TestSimple()
{
    string[] fields = new string[] { "b", "t" };
    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));

    Query q = mfqp.Parse("one");
    Assert.AreEqual("b:one t:one", q.ToString());

    q = mfqp.Parse("one two");
    Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());

    q = mfqp.Parse("+one +two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());

    q = mfqp.Parse("+one -two -three");
    Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());

    q = mfqp.Parse("one^2 two");
    Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());

    q = mfqp.Parse("one~ two");
    Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());

    q = mfqp.Parse("one~0.8 two^2");
    Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());

    q = mfqp.Parse("one* two*");
    Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());

    q = mfqp.Parse("[a TO c] two");
    Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());

    q = mfqp.Parse("w?ldcard");
    Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());

    q = mfqp.Parse("\"foo bar\"");
    Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());

    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());

    q = mfqp.Parse("\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());

    // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
    q = mfqp.Parse("b:\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4", q.ToString());

    // make sure that terms which have a field are not touched:
    q = mfqp.Parse("one f:two");
    Assert.AreEqual("(b:one t:one) f:two", q.ToString());

    // AND mode:
    mfqp.DefaultOperator = QueryParser.AND_OPERATOR;
    q = mfqp.Parse("one two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
}
public void Should_parse_using_lucene_query()
{
    var parser = new MultiFieldQueryParser(Version.LUCENE_29, new[] { "Name" }, new StandardAnalyzer(Version.LUCENE_29));
    var query = parser.Parse("Switzerland");

    var q = SearchSession.CreateFullTextQuery(query, typeof(Country))
        .List<Country>();

    Assert.AreEqual(1, q.Count);
}
private LQ.QueryParser GetQueryParser()
{
    var parser = new LQ.MultiFieldQueryParser(
        Version.LUCENE_30,
        new[] { SearchFields.Body, SearchFields.Title, SearchFields.Tag },
        _analyzer);
    parser.DefaultOperator = LQ.QueryParser.Operator.AND;
    return parser;
}
public Query GenerateLuceneQuery(string query, ILocationGroupingStrategy groupingStrategy)
{
    var parser = new MultiFieldQueryParser(
        Version.LUCENE_29,
        groupingStrategy.Fields.Select(f => f.Name).ToArray(),
        _analyzer);
    parser.DefaultOperator = QueryParser.Operator.AND;
    Query luceneQuery = parser.Parse(query);
    return luceneQuery;
}
private Hits GetHitsByFiled(string filedname, Analyzer analyzer, string searchStr, IndexSearcher searcher)
{
    MultiFieldQueryParser parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_CURRENT, new string[] { filedname }, analyzer);
    Query query = parser.Parse(searchStr);

    // Sort by the publish_time field; true means descending order
    Sort sort = new Sort();
    SortField f = new SortField("publish_time", SortField.STRING, true);
    sort.SetSort(f);

    Hits hits = searcher.Search(query, sort);
    return hits;
}
private Query GenerateLuceneQuery(string query, IEnumerable<LocationQueryField> fieldsToQuery)
{
    var parser = new MultiFieldQueryParser(
        Version.LUCENE_29,
        fieldsToQuery.Select(f => f.Name).Distinct().ToArray(),
        _analyzer);
    parser.DefaultOperator = QueryParser.Operator.AND;
    parser.FuzzyPrefixLength = 3;
    Query luceneQuery = parser.Parse(query);
    return luceneQuery;
}
public virtual void TestSimple()
{
    System.String[] fields = new System.String[] { "b", "t" };
    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer());

    Query q = mfqp.Parse("one");
    Assert.AreEqual("b:one t:one", q.ToString());

    q = mfqp.Parse("one two");
    Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());

    q = mfqp.Parse("+one +two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());

    q = mfqp.Parse("+one -two -three");
    Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());

    q = mfqp.Parse("one^2 two");
    Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());

    q = mfqp.Parse("one~ two");
    Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());

    q = mfqp.Parse("one~0.8 two^2");
    Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());

    q = mfqp.Parse("one* two*");
    Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());

    q = mfqp.Parse("[a TO c] two");
    Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());

    q = mfqp.Parse("w?ldcard");
    Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());

    q = mfqp.Parse("\"foo bar\"");
    Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());

    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());

    q = mfqp.Parse("\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());

    // make sure that terms which have a field are not touched:
    q = mfqp.Parse("one f:two");
    Assert.AreEqual("(b:one t:one) f:two", q.ToString());

    // AND mode:
    mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
    q = mfqp.Parse("one two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
}
public async Task<IReadOnlyList<Song>> SearchSong(string searchText)
{
    var hitsLimit = 5000;
    var fields = new[] { nameof(Song.Artist), nameof(Song.Name) };

    var parser = new MultiFieldQueryParser(Version.LUCENE_30, fields, this.analyzer);
    var query = parser.Parse(searchText);

    var hits = await Run(() => this.searcher.Search(query, null, hitsLimit, Sort.RELEVANCE).ScoreDocs);
    var docs = hits.Select(x => Tuple.Create(x.Doc, this.searcher.Doc(x.Doc)));
    var results = this.mapper.GetSongs(docs).ToList();

    return results;
}
/// <summary>
/// Searches the lucene index with the search text.
/// </summary>
/// <param name="searchText">The text to search with.</param>
/// <remarks>Syntax reference: http://lucene.apache.org/java/2_3_2/queryparsersyntax.html#Wildcard</remarks>
/// <exception cref="SearchException">An error occurred searching the lucene.net index.</exception>
public virtual IEnumerable<SearchResultViewModel> Search(string searchText)
{
    // This check is for the benefit of the CI builds
    if (!Directory.Exists(IndexPath))
        CreateIndex();

    List<SearchResultViewModel> list = new List<SearchResultViewModel>();

    if (string.IsNullOrWhiteSpace(searchText))
        return list;

    StandardAnalyzer analyzer = new StandardAnalyzer(LUCENEVERSION);
    MultiFieldQueryParser parser = new MultiFieldQueryParser(LuceneVersion.LUCENE_29, new string[] { "content", "title" }, analyzer);

    Query query = null;
    try
    {
        query = parser.Parse(searchText);
    }
    catch (Lucene.Net.QueryParsers.ParseException)
    {
        // Catch syntax errors in the search and remove them.
        searchText = QueryParser.Escape(searchText);
        query = parser.Parse(searchText);
    }

    if (query != null)
    {
        try
        {
            using (IndexSearcher searcher = new IndexSearcher(FSDirectory.Open(new DirectoryInfo(IndexPath)), true))
            {
                TopDocs topDocs = searcher.Search(query, 1000);
                foreach (ScoreDoc scoreDoc in topDocs.ScoreDocs)
                {
                    Document document = searcher.Doc(scoreDoc.Doc);
                    list.Add(new SearchResultViewModel(document, scoreDoc));
                }
            }
        }
        catch (FileNotFoundException)
        {
            // For 1.7's change to the Lucene search path.
            CreateIndex();
        }
        catch (Exception ex)
        {
            throw new SearchException(ex, "An error occurred while searching the index, try rebuilding the search index via the admin tools to fix this.");
        }
    }

    return list;
}
internal LuceneSearchCriteria(string type, Analyzer analyzer, string[] fields, bool allowLeadingWildcards, BooleanOperation occurance)
{
    Enforcer.ArgumentNotNull(fields, "fields");

    SearchIndexType = type;
    Query = new BooleanQuery();
    this.BooleanOperation = occurance;
    this.QueryParser = new MultiFieldQueryParser(_luceneVersion, fields, analyzer);
    this.QueryParser.SetAllowLeadingWildcard(allowLeadingWildcards);
    this._occurance = occurance.ToLuceneOccurance();
}
public static List<Document> MultiQuery(string search, string[] searchFields)
{
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    using (var indexSearcher = new IndexSearcher(Directory, false))
    {
        var queryParser = new MultiFieldQueryParser(Version.LUCENE_29, searchFields, analyzer);
        var query = queryParser.Parse(search);
        var hits = indexSearcher.Search(query, null, 999, Sort.RELEVANCE).ScoreDocs;
        analyzer.Close();
        return hits.Select(hit => indexSearcher.Doc(hit.Doc)).ToList();
    }
}
public PageQuery WithKeywords(String keywords)
{
    if (!String.IsNullOrEmpty(keywords))
    {
        QueryParser parser = new MultiFieldQueryParser(
            Lucene.Net.Util.Version.LUCENE_29,
            Fields,
            new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29));
        Query multiQuery = parser.Parse(keywords);
        this.AddQuery(multiQuery);
    }
    return this;
}
public virtual ActionResult SearchIndex(string term)
{
    // Setup indexer
    Directory directory = FSDirectory.GetDirectory("LuceneIndex", true);
    Analyzer analyzer = new StandardAnalyzer();
    IndexWriter writer = new IndexWriter(directory, analyzer, true);

    IndexReader red = IndexReader.Open(directory);
    int totDocs = red.MaxDoc();
    red.Close();

    foreach (var ticket in _ticketRepo.GetTicketsByProject(CurrentProject, 0, 1000).Items)
    {
        AddListingToIndex(ticket, writer);
    }
    writer.Optimize();

    // Close the writer
    writer.Close();

    // Setup searcher
    IndexSearcher searcher = new IndexSearcher(directory);
    MultiFieldQueryParser parser = new MultiFieldQueryParser(new string[] { "summary", "keyName" }, analyzer);
    Query query = parser.Parse(term);
    Hits hits = searcher.Search(query);

    var tickets = new List<Ticket>();
    for (int i = 0; i < hits.Length(); i++)
    {
        Document doc = hits.Doc(i);
        int id = 0;
        if (int.TryParse(doc.Get("id"), out id))
        {
            tickets.Add(_ticketRepo.GetTicketById(id));
        }
    }

    // Clean up everything
    searcher.Close();
    directory.Close();

    return View(new SearchIndexModel() { Tickets = tickets });
}
public DefaultQuery WithKeywords(string keywords)
{
    if (!string.IsNullOrEmpty(keywords))
    {
        var parser = new MultiFieldQueryParser(
            Version.LUCENE_29,
            this.SearchFields,
            new StandardAnalyzer(Version.LUCENE_29));
        Query multiQuery = parser.Parse(keywords);
        this.AddQuery(multiQuery);
    }
    return this;
}
// verify parsing of query using a stopping analyzer
private void AssertStopQueryEquals(string qtxt, string expectedRes)
{
    string[] fields = new string[] { "b", "t" };
    Occur[] occur = new Occur[] { Occur.SHOULD, Occur.SHOULD };
    TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();

    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, a);
    Query q = mfqp.Parse(qtxt);
    Assert.AreEqual(expectedRes, q.ToString());

    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, qtxt, fields, occur, a);
    Assert.AreEqual(expectedRes, q.ToString());
}
public virtual void TestAnalyzerReturningNull()
{
    var fields = new string[] { "f1", "f2", "f3" };
    var parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new AnalyzerReturningNull());

    var q = parser.Parse("bla AND blo");
    Assert.AreEqual("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.ToString());

    // the following queries are not affected as their terms are not analyzed anyway:
    q = parser.Parse("bla*");
    Assert.AreEqual("f1:bla* f2:bla* f3:bla*", q.ToString());

    q = parser.Parse("bla~");
    Assert.AreEqual("f1:bla~0.5 f2:bla~0.5 f3:bla~0.5", q.ToString());

    q = parser.Parse("[a TO c]");
    Assert.AreEqual("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.ToString());
}
public virtual void TestParsingQueryWithoutBoosts()
{
    var analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
    var fields = new[] { "f1", "f2" };
    var boosts = new Dictionary<String, Single>
    {
        { "f1", 2 }
        // missing f2 intentional
    };

    var parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, analyzer, boosts);
    var query = parser.Parse("bazinga");

    Assert.AreEqual("f1:bazinga^2.0 f2:bazinga", query.ToString());
}
public virtual void TestStaticMethod3Old()
{
    var queries = new[] { "one", "two" };
    var fields = new[] { "b", "t" };
    var flags = new[] { Occur.MUST, Occur.MUST_NOT };

    var q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+b:one -t:two", q.ToString());

    var flags2 = new[] { Occur.MUST };
    Assert.Throws<ArgumentException>(
        () => MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, flags2,
            new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));
}
public virtual void TestStaticMethod2()
{
    string[] fields = new[] { "b", "t" };
    Occur[] flags = new[] { Occur.MUST, Occur.MUST_NOT };

    Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+b:one -t:one", q.ToString());

    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one two", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());

    Occur[] flags2 = new[] { Occur.MUST };
    Assert.Throws<ArgumentException>(
        () => MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "blah", fields, flags2,
            new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));
}
public virtual void TestStaticMethod1()
{
    var fields = new[] { "b", "t" };
    var queries = new[] { "one", "two" };
    Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("b:one t:two", q.ToString());

    var queries2 = new[] { "+one", "+two" };
    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries2, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("(+b:one) (+t:two)", q.ToString());

    var queries3 = new[] { "one", "+two" };
    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries3, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("b:one (+t:two)", q.ToString());

    var queries4 = new[] { "one +more", "+two" };
    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries4, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());

    var queries5 = new[] { "blah" };
    Assert.Throws<ArgumentException>(() =>
        MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries5, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));

    // check also with stop words for this static form (qtxts[], fields[]).
    var stopA = new TestQueryParser.QPTestAnalyzer();

    var queries6 = new[] { "((+stop))", "+((stop))" };
    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries6, fields, stopA);
    Assert.AreEqual("", q.ToString());

    var queries7 = new[] { "one ((+stop)) +more", "+((stop)) +two" };
    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries7, fields, stopA);
    Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());
}
public virtual void TestStopWordSearching()
{
    Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
    Directory ramDir = new RAMDirectory();
    var iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    var doc = new Document();
    doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
    iw.AddDocument(doc);
    iw.Close();

    var mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, new[] { "body" }, analyzer);
    mfqp.DefaultOperator = QueryParser.Operator.AND;
    var q = mfqp.Parse("the footest");

    var is_Renamed = new IndexSearcher(ramDir, true);
    var hits = is_Renamed.Search(q, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits.Length);
    is_Renamed.Close();
}
public virtual void TestStaticMethod2Old()
{
    var fields = new[] { "b", "t" };
    //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
    var flags = new[] { Occur.MUST, Occur.MUST_NOT };
    var parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));

    Query q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT)); //, fields, flags, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+b:one -t:one", q.ToString());

    q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one two", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());

    var flags2 = new[] { Occur.MUST };
    Assert.Throws<ArgumentException>(
        () => MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "blah", fields, flags2,
            new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));
}
/// <summary>
/// Search the indexed content.
/// </summary>
/// <param name="word">Search keyword.</param>
/// <param name="pagesize">Number of records per page.</param>
/// <param name="pageindex">Current page number.</param>
/// <returns></returns>
public static SearchResult SearchContent(string modcode, string word, int pagesize, int pageindex, string searchparam1, string searchparam2, string searchparam3)
{
    SearchResult searchResult = new SearchResult();
    FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NativeFSLockFactory());
    IndexSearcher searcher = new IndexSearcher(directory, true);
    var analyzer = new PanGuAnalyzer();

    // Initialize MultiFieldQueryParser so several fields can be queried at once
    Lucene.Net.QueryParsers.MultiFieldQueryParser parser =
        new Lucene.Net.QueryParsers.MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, new string[] { "title", "content" }, analyzer);
    parser.DefaultOperator = QueryParser.AND_OPERATOR; // must be set before Parse to take effect

    // Build the query
    Lucene.Net.Search.Query query = parser.Parse(word);
    Lucene.Net.Search.BooleanQuery boolQuery = new Lucene.Net.Search.BooleanQuery();
    boolQuery.Add(query, Occur.MUST);

    if (!string.IsNullOrEmpty(modcode))
    {
        PhraseQuery queryModCode = new PhraseQuery();
        queryModCode.Add(new Term("modcode", modcode));
        boolQuery.Add(queryModCode, Occur.MUST);
    }
    if (!string.IsNullOrEmpty(searchparam1))
    {
        WildcardQuery query1 = new WildcardQuery(new Term("searchparam1", "*" + searchparam1 + "*"));
        boolQuery.Add(query1, Occur.MUST);
    }
    if (!string.IsNullOrEmpty(searchparam2))
    {
        WildcardQuery query1 = new WildcardQuery(new Term("searchparam2", "*" + searchparam2 + "*"));
        boolQuery.Add(query1, Occur.MUST);
    }
    if (!string.IsNullOrEmpty(searchparam3))
    {
        WildcardQuery query1 = new WildcardQuery(new Term("searchparam3", "*" + searchparam3 + "*"));
        boolQuery.Add(query1, Occur.MUST);
    }

    Sort sort = new Sort(new SortField("date", SortField.STRING, true));
    var result = searcher.Search(boolQuery, null, 1000, sort);

    if (result.TotalHits == 0)
    {
        searchResult.count = 0;
    }
    else
    {
        searchResult.count = result.TotalHits;

        int startNum = 0, endNum = result.TotalHits;
        if (pagesize > 0)
        {
            // Use paging when pagesize > 0
            startNum = (pageindex - 1) * pagesize;
            endNum = startNum + pagesize;
        }

        ScoreDoc[] docs = result.ScoreDocs;
        List<JObject> dataList = new List<JObject>();
        for (int i = 0; i < docs.Length; i++)
        {
            if (i < startNum) { continue; }
            if (i >= endNum) { break; }

            Document doc = searcher.Doc(docs[i].Doc);
            string id = doc.Get("id").ToString();
            string title = doc.Get("title").ToString();
            string content = doc.Get("content").ToString();
            string date = doc.Get("date").ToString();
            string param = doc.Get("param").ToString();
            string mcode = doc.Get("modcode").ToString();
            string param1 = doc.Get("searchparam1").ToString();
            string param2 = doc.Get("searchparam2").ToString();
            string param3 = doc.Get("searchparam3").ToString();

            JObject obj = new JObject();
            obj["id"] = id;

            // Create the HTML formatter; the setting holds the prefix and suffix wrapped around highlighted words
            string highLightTag = Util.GetAppSetting("HighLightTag", "<font color=\"red\">|</font>");
            string[] tarArr = highLightTag.Split('|');
            var simpleHTMLFormatter = new SimpleHTMLFormatter(tarArr[0], tarArr[1]);

            // Create the Highlighter from the HTML formatter and the Pan Gu segmenter
            var highlighter = new Highlighter(simpleHTMLFormatter, new PanGu.Segment());

            // Set the number of characters per summary fragment
            int highlightFragmentSize = Util.GetAppSetting("HighlightFragmentSize", "100").ToInt();
            highlighter.FragmentSize = highlightFragmentSize;

            // Get the best-matching fragments for the body and the title
            String bodyPreview = highlighter.GetBestFragment(word, content);
            string newTitle = highlighter.GetBestFragment(word, title);
            if (!string.IsNullOrEmpty(newTitle))
            {
                title = newTitle;
            }

            obj["title"] = title;
            obj["content"] = bodyPreview;
            obj["date"] = date;
            obj["param"] = param;
            obj["modcode"] = mcode;
            obj["searchparam1"] = param1;
            obj["searchparam2"] = param2;
            obj["searchparam3"] = param3;
            dataList.Add(obj);
        }

        searchResult.data = dataList;
    }

    analyzer.Close();
    searcher.Dispose();
    directory.Dispose();
    return searchResult;
}