// Runs a Lucene search over the configured index and renders the top 10 hits.
// query: raw user-entered text, parsed across the Name/Description fields with
// a Brazilian analyzer.
public ActionResult Search(string query)
{
    ViewData["Message"] = "query : " + query;

    var searcher = new IndexSearcher(
        new Lucene.Net.Store.SimpleFSDirectory(new DirectoryInfo(Configuration.IndexDirectory)),
        readOnly: true);
    try
    {
        var fieldsToSearchIn = new[] { Configuration.Fields.Name, Configuration.Fields.Description };
        var queryAnalyzer = new MultiFieldQueryParser(
            Version.LUCENE_CURRENT, fieldsToSearchIn, new BrazilianAnalyzer());

        const int numberOfResults = 10;
        var top10Results = searcher.Search(queryAnalyzer.Parse(query), numberOfResults);

        var docs = new List<DocumentViewModel>();
        foreach (var scoreDoc in top10Results.scoreDocs)
        {
            var document = searcher.Doc(scoreDoc.doc);
            var name = document.GetField(Configuration.Fields.Name).StringValue();
            var description = document.GetField(Configuration.Fields.Description).StringValue();
            var link = document.GetField(Configuration.Fields.Link).StringValue();
            docs.Add(new DocumentViewModel(name, description, link));
        }

        return View(new SearchViewModel(docs));
    }
    finally
    {
        // The original leaked the searcher, keeping index files open until
        // finalization; always close it.
        searcher.Close();
    }
}
// Exercises MultiFieldQueryParser's expansion of unqualified terms across the
// fields "b" and "t" for every major query type (terms, operators, boosts,
// fuzzy, prefix, range, wildcard, phrases) plus explicit-field behavior and
// the AND default operator.
public virtual void TestSimple()
{
    string[] fields = new string[]{"b", "t"};
    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT));

    // Plain terms expand into one clause per field.
    Query q = mfqp.Parse("one");
    Assert.AreEqual("b:one t:one", q.ToString());
    q = mfqp.Parse("one two");
    Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());

    // Unary +/- operators apply to the whole per-term group.
    q = mfqp.Parse("+one +two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("+one -two -three");
    Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());

    // Boosts and fuzzy/prefix/range/wildcard modifiers survive the expansion.
    q = mfqp.Parse("one^2 two");
    Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());
    q = mfqp.Parse("one~ two");
    Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());
    q = mfqp.Parse("one~0.8 two^2");
    Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());
    q = mfqp.Parse("one* two*");
    Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());
    q = mfqp.Parse("[a TO c] two");
    Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());
    q = mfqp.Parse("w?ldcard");
    Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());

    // Phrases, including phrase slop.
    q = mfqp.Parse("\"foo bar\"");
    Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());
    q = mfqp.Parse("\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());

    // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
    q = mfqp.Parse("b:\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4", q.ToString());

    // make sure that terms which have a field are not touched:
    q = mfqp.Parse("one f:two");
    Assert.AreEqual("(b:one t:one) f:two", q.ToString());

    // AND mode:
    mfqp.DefaultOperator = QueryParser.AND_OPERATOR;
    q = mfqp.Parse("one two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
}
/// <summary>
/// Searches the lucene index with the search text.
/// </summary>
/// <param name="searchText">The text to search with.</param>
/// <returns>Matching documents as view models; an empty list for blank input.</returns>
/// <remarks>Syntax reference: http://lucene.apache.org/java/2_3_2/queryparsersyntax.html#Wildcard</remarks>
/// <exception cref="SearchException">An error occured searching the lucene.net index.</exception>
public virtual IEnumerable<SearchResultViewModel> Search(string searchText)
{
    // This check is for the benefit of the CI builds
    if (!Directory.Exists(IndexPath))
        CreateIndex();

    List<SearchResultViewModel> list = new List<SearchResultViewModel>();
    if (string.IsNullOrWhiteSpace(searchText))
        return list;

    StandardAnalyzer analyzer = new StandardAnalyzer(LUCENEVERSION);

    // Use the same LUCENEVERSION constant as the analyzer. The original
    // hard-coded LUCENE_29 here, which would silently diverge from the
    // analyzer's behavior if LUCENEVERSION were ever upgraded.
    MultiFieldQueryParser parser = new MultiFieldQueryParser(LUCENEVERSION,
        new string[] { "content", "title", "projectlanguage", "projectstatus" }, analyzer);

    Query query = null;
    try
    {
        query = parser.Parse(searchText);
    }
    catch (Lucene.Net.QueryParsers.ParseException)
    {
        // Catch syntax errors in the search and remove them.
        searchText = QueryParser.Escape(searchText);
        query = parser.Parse(searchText);
    }

    if (query != null)
    {
        try
        {
            using (IndexSearcher searcher = new IndexSearcher(FSDirectory.Open(new DirectoryInfo(IndexPath)), true))
            {
                TopDocs topDocs = searcher.Search(query, 1000);
                foreach (ScoreDoc scoreDoc in topDocs.ScoreDocs)
                {
                    Document document = searcher.Doc(scoreDoc.Doc);
                    list.Add(new SearchResultViewModel(document, scoreDoc));
                }
            }
        }
        catch (FileNotFoundException)
        {
            // For 1.7's change to the Lucene search path.
            CreateIndex();
        }
        catch (Exception ex)
        {
            throw new SearchException(ex, "An error occured while searching the index, try rebuilding the search index via the admin tools to fix this.");
        }
    }
    return list;
}
/// <summary>
/// Parses <paramref name="term"/> into a fuzzy Lucene query, falling back to a
/// sanitized parse when the raw term is not valid query syntax.
/// </summary>
/// <param name="term">Raw user-entered search term.</param>
/// <param name="q">The parser to use.</param>
/// <param name="analyser">Analyzer used when sanitizing the term.</param>
public static Query SafeGetSearchQuery(this string term, MultiFieldQueryParser q, Analyzer analyser)
{
    Query query;
    try
    {
        query = q.Parse(term.MakeFuzzy());
    }
    catch (Lucene.Net.QueryParsers.ParseException)
    {
        // Only query-syntax errors should trigger the sanitize-and-retry path;
        // the original bare catch also swallowed unrelated failures such as
        // null arguments, masking real bugs.
        var searchTerm = term.Sanitize(analyser);
        query = q.Parse(searchTerm);
    }
    return query;
}
public virtual void TestAnalyzerReturningNull()
{
    // With AnalyzerReturningNull, analyzed terms vanish for field "f1" and only
    // expand over f2/f3, while unanalyzed query types keep all three fields.
    var fields = new System.String[] { "f1", "f2", "f3" };
    var parser = new MultiFieldQueryParser(fields, new AnalyzerReturningNull());

    var q = parser.Parse("bla AND blo");
    Assert.AreEqual("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.ToString());

    // the following queries are not affected as their terms are not analyzed anyway:
    q = parser.Parse("bla*");
    Assert.AreEqual("f1:bla* f2:bla* f3:bla*", q.ToString());
    q = parser.Parse("bla~");
    Assert.AreEqual("f1:bla~0.5 f2:bla~0.5 f3:bla~0.5", q.ToString());
    q = parser.Parse("[a TO c]");
    Assert.AreEqual("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.ToString());
}
public virtual void TestAnalyzerReturningNull()
{
    // Same scenario as the legacy test, but through the version-aware ctor:
    // the null-returning analyzer drops analyzed terms for "f1" only.
    string[] fields = { "f1", "f2", "f3" };
    MultiFieldQueryParser parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new AnalyzerReturningNull());

    Query q = parser.Parse("bla AND blo");
    Assert.AreEqual("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.ToString());

    // the following queries are not affected as their terms are not analyzed anyway:
    q = parser.Parse("bla*");
    Assert.AreEqual("f1:bla* f2:bla* f3:bla*", q.ToString());
    q = parser.Parse("bla~");
    Assert.AreEqual("f1:bla~0.5 f2:bla~0.5 f3:bla~0.5", q.ToString());
    q = parser.Parse("[a TO c]");
    Assert.AreEqual("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.ToString());
}
// Prefix search over the "name" and "summary" fields of the local Lucene
// index; returns Search_gl.search() restricted to the matching document ids.
public Data searchLucene(Data data)
{
    Search_gl search = new Search_gl();
    List<string> item = new List<string>();

    Lucene.Net.Store.Directory directory = FSDirectory.Open(
        new DirectoryInfo(Environment.CurrentDirectory + "\\LuceneIndex"));
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    try
    {
        //search for multifield
        MultiFieldQueryParser parser = new MultiFieldQueryParser(new string[] { "name", "summary" }, analyzer);

        //cant search blank text with wildcard as first character
        Query query = parser.Parse((data.getString("search")) + "*");

        TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
        searcher.Search(query, collector);
        ScoreDoc[] hits = collector.TopDocs().ScoreDocs;
        for (int i = 0; i < hits.Length; i++)
        {
            Document doc = searcher.Doc(hits[i].doc);
            item.Add(doc.Get("id"));
        }
        return search.search(data, item.ToArray());
    }
    finally
    {
        // The original closed the reader/searcher only on the success path,
        // leaking both whenever Parse or Search threw.
        searcher.Close();
        reader.Close();
    }
}
// Full-index search over Title and Body; every hit becomes a SearchResult.
public SearchResults Search(DocumentRoot root, string term)
{
    var results = new SearchResults();
    var indexPath = _settings.GetSearchIndexPath();
    var version = Lucene.Net.Util.Version.LUCENE_30;

    using (var directory = FSDirectory.Open(new DirectoryInfo(indexPath)))
    using (var reader = IndexReader.Open(directory, true))
    using (var searcher = new IndexSearcher(reader))
    {
        var analyzer = new StandardAnalyzer(version);
        var parser = new MultiFieldQueryParser(version, new[] { "Title", "Body" }, analyzer);

        // Ask for up to MaxDoc hits so nothing is cut off by a fixed limit.
        var topDocs = searcher.Search(parser.Parse(term), reader.MaxDoc);

        foreach (var scoreDoc in topDocs.ScoreDocs)
        {
            var document = searcher.Doc(scoreDoc.Doc);
            results.Documents.Add(new SearchResult
            {
                Score = scoreDoc.Score,
                Snippet = document.Get("Snippet"),
                Title = document.Get("Title")
            });
        }
    }
    return results;
}
// Returns the distinct key-field values of documents matching textToFind
// across the configured search fields (top 100 hits, relevance order).
IEnumerable<string> Search(string textToFind)
{
    var reader = IndexReader.Open(_ramDirectory, true);
    var searcher = new IndexSearcher(reader);
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    try
    {
        var parser = new MultiFieldQueryParser(Version.LUCENE_29, _searchFields, analyzer);
        var query = parser.Parse(textToFind);
        var collector = TopScoreDocCollector.create(100, true);
        searcher.Search(query, collector);
        var hits = collector.TopDocs().scoreDocs;

        var foundKeys = new List<string>();
        foreach (ScoreDoc scoreDoc in hits)
        {
            var document = searcher.Doc(scoreDoc.doc);
            var key = document.Get(_keyField);
            // De-duplicate while preserving first-hit (relevance) order.
            if (key != null && !foundKeys.Contains(key))
            {
                foundKeys.Add(key);
            }
        }
        return foundKeys;
    }
    finally
    {
        // Close even when Parse/Search throws; the original leaked the
        // reader, searcher and analyzer on any exception.
        searcher.Close();
        reader.Close();
        analyzer.Close();
    }
}
// Prefix search over "name" and "username"; maps the matching document ids
// through Account_lg.selectUser.
public Data searchLucene(Data data)
{
    Account_lg account = new Account_lg();
    List<string> item = new List<string>();

    // TODO(review): hard-coded developer-machine path — should come from
    // configuration (cf. the Environment.CurrentDirectory-based variant).
    Lucene.Net.Store.Directory directory = FSDirectory.Open(
        new DirectoryInfo("C:\\Visual Studio 2010\\Transaction" + "\\LuceneIndex"));
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    try
    {
        //search for multifield
        MultiFieldQueryParser parser = new MultiFieldQueryParser(
            Lucene.Net.Util.Version.LUCENE_29, new string[] { "name", "username" }, analyzer);

        //cant search blank text with wildcard as first character
        Query query = parser.Parse((data.getString("search")) + "*");

        TopScoreDocCollector collector = TopScoreDocCollector.Create(1000, true);
        searcher.Search(query, collector);
        ScoreDoc[] hits = collector.TopDocs().ScoreDocs;
        for (int i = 0; i < hits.Length; i++)
        {
            Document doc = searcher.Doc(hits[i].Doc);
            item.Add(doc.Get("id"));
        }
        return account.selectUser(data, item.ToArray());
    }
    finally
    {
        // Dispose on all paths; the original leaked both on exceptions.
        searcher.Dispose();
        reader.Dispose();
    }
}
// Searches index with query text.
// Parses `querytext` and runs it against the class-level `searcher`, recording
// the parsed query and parse timing into instance state (currentQuery,
// queryTime, finalQuery).
//   toProcess == false : treat the text as a verbatim phrase (wrap in quotes).
//   toExpand  == true  : strip field prefixes via a standard parse, expand
//                        with synonyms, then lower-case.
// Returns the top 100 hits, or null (and query == null) for empty input.
public TopDocs SearchForQuery(string querytext, out Lucene.Net.Search.Query query, bool toProcess, bool toExpand)
{
    Stopwatch stopwatch2 = Stopwatch.StartNew();
    if (!toProcess)
    {
        querytext = "\"" + querytext + "\"";
    }
    else
    {
        if (toExpand)
        {
            // NOTE(review): the Replace calls assume "Url:"/"Query:" field
            // prefixes appear in the parsed text — confirm against URL_FN.
            var standardParser = new QueryParser(VERSION, URL_FN, new StandardAnalyzer(VERSION));
            querytext = standardParser.Parse(querytext).ToString().Replace("Url:", "").Replace("Query:", "");
            querytext = GetSynonyms(querytext);
        }
        querytext = querytext.ToLower();
    }
    if (querytext != string.Empty)
    {
        query = parser.Parse(querytext);
        currentQuery = query;
        // Timing covers pre-processing + parse, not the search itself.
        stopwatch2.Stop();
        queryTime = stopwatch2.Elapsed.TotalSeconds.ToString();
        finalQuery = query.ToString();
        TopDocs results = searcher.Search(query, 100);
        return(results);
    }
    else
    {
        query = null;
        return(null);
    }
}
// Deletes every indexed document in which ALL of the given key fields match
// `value` (AND default operator).
public void RemoveFromIndex(string[] keys, string value)
{
    var analyzer = new AccentedAnalyzer();
    var query = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, keys, analyzer);
    var directory = FSDirectory.Open(System.IO.Directory.GetParent(IndexPath));
    IndexReader idxReader = IndexReader.Open(indexPath);
    var searcher = new IndexSearcher(directory, true);
    try
    {
        query.SetDefaultOperator(QueryParser.Operator.AND);
        var q = query.Parse(value);
        int top = idxReader.MaxDoc();
        var results = TopScoreDocCollector.create(top, true);
        searcher.Search(q, results);
        ScoreDoc[] hits = results.TopDocs().scoreDocs;

        if (hits.Length > 0)
        {
            // Use ONE writable reader for all deletions. The original opened,
            // committed and closed a reader per hit — slow, and because a
            // commit can trigger merges that renumber doc ids, later
            // iterations risked deleting the wrong documents.
            IndexReader deleter = IndexReader.Open(directory, false);
            try
            {
                for (int i = 0; i < hits.Length; i++)
                {
                    deleter.DeleteDocument(hits[i].doc);
                }
                deleter.Commit();
                deleter.Flush();
            }
            finally
            {
                deleter.Close();
            }
        }
    }
    finally
    {
        idxReader.Close();   // the original leaked this reader
        searcher.Close();
        directory.Close();
    }
}
// Searches every indexed field for `query` and maps each hit to an Advert,
// formatting each stored field through the PropertyDescriptors metadata.
// Returns null when no searcher has been initialized.
public List<Advert> SearchAds(string query)
{
    if (_searcher == null)
        return null;

    // Enumerate all field names present in the index so the multi-field
    // parser covers every one of them.
    ICollection fields = _searcher.Reader.GetFieldNames(IndexReader.FieldOption.ALL);
    List<string> fldList = new List<string>();
    foreach (DictionaryEntry f in fields)
    {
        fldList.Add(f.Key.ToString());
    }

    List<Advert> adverts = new List<Advert>();
    MultiFieldQueryParser parser = new MultiFieldQueryParser(fldList.ToArray(), _analyzer);
    Query q = parser.Parse(query);
    Hits hits = _searcher.Search(q);

    // Display-format rules live in an XML file next to the executable.
    PropertyDescriptors desc = new PropertyDescriptors();
    desc.LoadData(System.Windows.Forms.Application.StartupPath + "\\PropertyDescriptors.xml");

    for (int i = 0; i < hits.Length(); i++)
    {
        Advert ad = new Advert();
        Document doc = hits.Doc(i);
        foreach (Field f in doc.Fields())
        {
            // Convert each stored value to its displayable form.
            string temp = desc.GetDisplayableFormat(f.Name(), f.StringValue());
            ad[f.Name()] = temp;
        }
        adverts.Add(ad);
    }
    return adverts;
}
// Fuzzy multi-term search over the "name" and "vessel" fields; each
// whitespace-separated word contributes an optional fuzzy clause.
public ProjectData[] Search(string searchTerm)
{
    // Dispose the searcher when done — the original leaked one per call.
    using (IndexSearcher searcher = new IndexSearcher(luceneIndexDirectory))
    {
        IntegralCollector searcherCollector = new IntegralCollector();

        // Setup the fields to search through
        string[] searchfields = new string[] { "name", "vessel" };

        // Build our booleanquery that will be a combination of all the queries for each individual search term
        var finalQuery = new BooleanQuery();
        var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, searchfields, analyzer);

        // Split the search string into separate search terms by word
        string[] terms = searchTerm.Split(new[] { " " }, StringSplitOptions.RemoveEmptyEntries);
        foreach (string term in terms)
        {
            // Strip any user-supplied '~' then append one so every term is fuzzy.
            finalQuery.Add(parser.Parse(term.Replace("~", "") + "~"), Occur.SHOULD);
        }

        searcher.Search(finalQuery, searcherCollector);

        var results = new ProjectData[searcherCollector.Docs.Count];
        for (int i = 0; i < searcherCollector.Docs.Count; i++)
        {
            var doc = searcher.Doc(searcherCollector.Docs[i]);
            results[i] = new ProjectData(doc.Get("name"), doc.Get("vessel"));
        }
        return results;
    }
}
// Verifies the static Parse(text, fields, flags, analyzer) overload: flags[i]
// (MUST / MUST_NOT) applies to fields[i], and a flags array whose length does
// not match fields is rejected.
public virtual void TestStaticMethod2Old()
{
    System.String[] fields = new System.String[] { "b", "t" };
    //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
    BooleanClause.Occur[] flags = new BooleanClause.Occur[] { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
    // NOTE: the original also constructed an unused MultiFieldQueryParser
    // instance here; removed — only the static overload is under test.

    Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer());
    Assert.AreEqual("+b:one -t:one", q.ToString());

    q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer());
    Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());

    try
    {
        BooleanClause.Occur[] flags2 = new BooleanClause.Occur[] { BooleanClause.Occur.MUST };
        q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
        Assert.Fail();
    }
    catch (System.ArgumentException)
    {
        // expected exception, array length differs
    }
}
// Searches the App_Data index for `query` and returns one page of results
// (PageSize items, 1-based page number) plus the total hit count.
public static SearchResults Query(HttpContext ctx, string query, int page = 1)
{
    var indexPath = ctx.Server.MapPath("~/App_Data/Index");
    var indexSearcher = new DirectoryIndexSearcher(new DirectoryInfo(indexPath));

    using (var searchService = new SearchService(indexSearcher))
    {
        var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29);
        var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, new[] { "Text" }, analyzer);
        var result = searchService.SearchIndex(parser.Parse(query));

        var pageOfDocuments = result.Results
            .Skip(PageSize * (page - 1))
            .Take(PageSize)
            .Select(d => new SearchResult
            {
                Url = d.Get("Url"),
                Title = d.Get("Title"),
                Summary = d.Get("Summary")
            });

        return new SearchResults
        {
            Documents = pageOfDocuments,
            TotalCount = result.Results.Count()
        };
    }
}
// Finds medical consultories whose specialty name or city matches any word of
// the cleaned search text; each word contributes an optional (SHOULD) clause.
public IList<MedicalConsultory> Search(string searchText)
{
    if (string.IsNullOrWhiteSpace(searchText))
        return new List<MedicalConsultory>();

    using (var searcher = new IndexSearcher(_indexDirectoryPath, true))
    {
        var analyzer = new StandardAnalyzer(_version);

        // Multi field search
        var query = new BooleanQuery();
        var parser = new MultiFieldQueryParser(_version, new[] { "SpecialtyName", "City" }, analyzer);
        searchText.RemoveIrrelevantTerms()
            .Split(' ')
            .ToList()
            .ForEach(word => { query.Add(parser.Parse(word), Occur.SHOULD); });

        var hits = searcher.Search(query, null, searcher.MaxDoc, Sort.RELEVANCE).ScoreDocs;

        // Simple field search
        //var parser = new QueryParser(Version.LUCENE_30, "SpecialtyName", analyzer);
        //var query = parser.Parse(searchText.Trim());
        //var hits = searcher.Search(query, searcher.MaxDoc).ScoreDocs;

        var results = hits.Select(hit => MapMedicalConsultory(hit, searcher)).ToList();
        analyzer.Close();
        // The original also called searcher.Dispose() here — redundant, since
        // the using block already disposes the searcher on exit.
        return results;
    }
}
// Runs the configured query string over title/content/keywords and
// materializes every hit into a NewsModel; MatchCount reports the hit total.
public override List<ISearchEntity> GetSearchResult(out int MatchCount)
{
    Analyzer analyzer = new StandardAnalyzer();
    IndexSearcher searcher = new IndexSearcher(searchInfo.ConfigElement.IndexDirectory);
    MultiFieldQueryParser queryParser = new MultiFieldQueryParser(
        new string[] { "title", "content", "keywords" }, analyzer);
    Query parsedQuery = queryParser.Parse(searchInfo.QueryString);
    Hits hits = searcher.Search(parsedQuery);

    List<ISearchEntity> resultList = new List<ISearchEntity>();
    for (int index = 0; index < hits.Length(); index++)
    {
        Document doc = hits.Doc(index);
        resultList.Add((ISearchEntity)new NewsModel()
        {
            EntityIdentity = Convert.ToInt32(doc.Get("newsid")),
            Title = Convert.ToString(doc.Get("title")),
            Content = Convert.ToString(doc.Get("content")),
            Keywords = doc.Get("keywords")
        });
    }
    MatchCount = hits.Length();
    searcher.Close();
    return resultList;
}
public void Should_parse_using_lucene_query()
{
    // A raw Lucene query built with MultiFieldQueryParser should be usable
    // through the full-text session and return exactly one Country.
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    var parser = new MultiFieldQueryParser(Version.LUCENE_29, new[] { "Name" }, analyzer);
    var luceneQuery = parser.Parse("Switzerland");

    var countries = SearchSession.CreateFullTextQuery(luceneQuery, typeof(Country))
        .List<Country>();

    Assert.AreEqual(1, countries.Count);
}
// Builds a Lucene query over the grouping strategy's field set, requiring all
// terms (AND default operator).
public Query GenerateLuceneQuery(string query, ILocationGroupingStrategy groupingStrategy)
{
    var fieldNames = groupingStrategy.Fields.Select(f => f.Name).ToArray();
    var parser = new MultiFieldQueryParser(Version.LUCENE_29, fieldNames, _analyzer)
    {
        DefaultOperator = QueryParser.Operator.AND
    };
    return parser.Parse(query);
}
// Searches a single field and returns the hits sorted by publish_time,
// newest first. ("Filed" in the name is a historical typo kept for callers.)
private Hits GetHitsByFiled(string filedname, Analyzer analyzer, string searchStr, IndexSearcher searcher)
{
    var parser = new MultiFieldQueryParser(
        Lucene.Net.Util.Version.LUCENE_CURRENT, new string[] { filedname }, analyzer);
    Query query = parser.Parse(searchStr);

    // Sort on the publish_time field as a string, descending (true = reverse).
    Sort sort = new Sort();
    sort.SetSort(new SortField("publish_time", SortField.STRING, true));

    return searcher.Search(query, sort);
}
// Builds an AND query over the distinct field names, with fuzzy matching
// anchored on the first three characters of each term.
private Query GenerateLuceneQuery(string query, IEnumerable<LocationQueryField> fieldsToQuery)
{
    var uniqueFieldNames = fieldsToQuery.Select(f => f.Name).Distinct().ToArray();
    var parser = new MultiFieldQueryParser(Version.LUCENE_29, uniqueFieldNames, _analyzer)
    {
        DefaultOperator = QueryParser.Operator.AND,
        FuzzyPrefixLength = 3
    };
    return parser.Parse(query);
}
// Legacy-API variant of TestSimple: verifies term expansion across fields
// "b" and "t" for terms, operators, boosts, fuzzy/prefix/range/wildcard
// queries, phrases, explicit-field terms, and the AND default operator.
public virtual void TestSimple()
{
    System.String[] fields = new System.String[]{"b", "t"};
    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer());

    // Plain terms expand into one clause per field.
    Query q = mfqp.Parse("one");
    Assert.AreEqual("b:one t:one", q.ToString());
    q = mfqp.Parse("one two");
    Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());

    // Unary +/- operators apply to the whole per-term group.
    q = mfqp.Parse("+one +two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("+one -two -three");
    Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());

    // Boosts and fuzzy/prefix/range/wildcard modifiers survive the expansion.
    q = mfqp.Parse("one^2 two");
    Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());
    q = mfqp.Parse("one~ two");
    Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());
    q = mfqp.Parse("one~0.8 two^2");
    Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());
    q = mfqp.Parse("one* two*");
    Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());
    q = mfqp.Parse("[a TO c] two");
    Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());
    q = mfqp.Parse("w?ldcard");
    Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());

    // Phrases, including phrase slop.
    q = mfqp.Parse("\"foo bar\"");
    Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());
    q = mfqp.Parse("\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());

    // make sure that terms which have a field are not touched:
    q = mfqp.Parse("one f:two");
    Assert.AreEqual("(b:one t:one) f:two", q.ToString());

    // AND mode:
    mfqp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
    q = mfqp.Parse("one two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
}
// Matches searchText against artist and song name and maps the relevance-
// sorted hits (capped at 5000) to Song entities.
public async Task<IReadOnlyList<Song>> SearchSong(string searchText)
{
    const int hitsLimit = 5000;
    var fields = new[] { nameof(Song.Artist), nameof(Song.Name) };

    var parser = new MultiFieldQueryParser(Version.LUCENE_30, fields, this.analyzer);
    var query = parser.Parse(searchText);

    var hits = await Run(() => this.searcher.Search(query, null, hitsLimit, Sort.RELEVANCE).ScoreDocs);

    var docs = hits.Select(hit => Tuple.Create(hit.Doc, this.searcher.Doc(hit.Doc)));
    return this.mapper.GetSongs(docs).ToList();
}
// Adds a multi-field keyword clause to the page query; blank input is a no-op.
public PageQuery WithKeywords(String keywords)
{
    if (!String.IsNullOrEmpty(keywords))
    {
        var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29);
        QueryParser parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_29, Fields, analyzer);
        this.AddQuery(parser.Parse(keywords));
    }
    return this;
}
// Adds a keyword clause across the configured search fields; blank input is
// a no-op.
public DefaultQuery WithKeywords(string keywords)
{
    if (string.IsNullOrEmpty(keywords))
    {
        return this;
    }

    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    var parser = new MultiFieldQueryParser(Version.LUCENE_29, this.SearchFields, analyzer);
    this.AddQuery(parser.Parse(keywords));
    return this;
}
// Rebuilds the Lucene index from the current project's tickets (up to 1000)
// and then searches it for `term` over the "summary" and "keyName" fields.
public virtual ActionResult SearchIndex(string term)
{
    //Setup indexer
    Directory directory = FSDirectory.GetDirectory("LuceneIndex", true);
    Analyzer analyzer = new StandardAnalyzer();
    IndexWriter writer = new IndexWriter(directory, analyzer, true);
    // NOTE: the original also opened an IndexReader here solely to compute an
    // unused MaxDoc value — removed.
    // NOTE(review): re-indexing on every search request is expensive;
    // consider moving indexing out of the request path.

    foreach (var ticket in _ticketRepo.GetTicketsByProject(CurrentProject, 0, 1000).Items)
    {
        AddListingToIndex(ticket, writer);
    }
    writer.Optimize();

    //Close the writer
    writer.Close();

    //Setup searcher
    IndexSearcher searcher = new IndexSearcher(directory);
    var tickets = new List<Ticket>();
    try
    {
        MultiFieldQueryParser parser = new MultiFieldQueryParser(
            new string[] { "summary", "keyName" }, analyzer);
        Query query = parser.Parse(term);
        Hits hits = searcher.Search(query);

        for (int i = 0; i < hits.Length(); i++)
        {
            Document doc = hits.Doc(i);
            int id = 0;
            if (int.TryParse(doc.Get("id"), out id))
            {
                tickets.Add(_ticketRepo.GetTicketById(id));
            }
        }
    }
    finally
    {
        //Clean up everything (also on parse/search failure — the original
        //leaked the searcher and directory when Parse threw)
        searcher.Close();
        directory.Close();
    }

    return View(new SearchIndexModel() { Tickets = tickets });
}
// Relevance-sorted lookup of `search` across the given fields, capped at
// 999 hits; returns the matching stored documents.
public static List<Document> MultiQuery(string search, string[] searchFields)
{
    var analyzer = new StandardAnalyzer(Version.LUCENE_29);
    using (var indexSearcher = new IndexSearcher(Directory, false))
    {
        var queryParser = new MultiFieldQueryParser(Version.LUCENE_29, searchFields, analyzer);
        var parsedQuery = queryParser.Parse(search);
        var scoreDocs = indexSearcher.Search(parsedQuery, null, 999, Sort.RELEVANCE).ScoreDocs;
        analyzer.Close();

        var documents = new List<Document>();
        foreach (var hit in scoreDocs)
        {
            documents.Add(indexSearcher.Doc(hit.Doc));
        }
        return documents;
    }
}
// Adds an author to a product's Authors set, commits, and verifies the change
// is visible through the full-text index (regression test for HSEARCH-56,
// where updates to embedded collections were not re-indexed).
// Relies on fixture state: session `s`, transaction `tx`, entities p1/a4.
public void CanLookupEntityByUpdatedValueInSet()
{
    Product p = s.Get<Product>(p1.Id);
    p.Authors.Add(s.Get<Author>(a4.Id));
    tx.Commit();

    QueryParser parser = new MultiFieldQueryParser(new string[] { "name", "authors.name" }, new StandardAnalyzer());
    IFullTextSession session = Search.CreateFullTextSession(s);
    Query query = parser.Parse("Proust");
    IList result = session.CreateFullTextQuery(query).List();
    //HSEARCH-56
    Assert.AreEqual(1, result.Count, "update of collection of embedded ignored");
}
// verify parsing of query using a stopping analyzer
private void AssertStopQueryEquals(System.String qtxt, System.String expectedRes)
{
    System.String[] fields = new System.String[] { "b", "t" };
    Occur[] occur = new Occur[] { Occur.SHOULD, Occur.SHOULD };
    TestQueryParser.QPTestAnalyzer stoppingAnalyzer = new TestQueryParser.QPTestAnalyzer();

    // The instance parser and the static Parse helper must agree.
    MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, stoppingAnalyzer);
    Query parsed = parser.Parse(qtxt);
    Assert.AreEqual(expectedRes, parsed.ToString());

    parsed = MultiFieldQueryParser.Parse(qtxt, fields, occur, stoppingAnalyzer);
    Assert.AreEqual(expectedRes, parsed.ToString());
}
// verify parsing of query using a stopping analyzer
private void AssertStopQueryEquals(string qtxt, string expectedRes)
{
    string[] fields = { "b", "t" };
    Occur[] occur = { Occur.SHOULD, Occur.SHOULD };
    var stopAnalyzer = new TestQueryParser.QPTestAnalyzer();

    // Both the instance parser and the static helper must yield the same query.
    var mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, stopAnalyzer);
    Assert.AreEqual(expectedRes, mfqp.Parse(qtxt).ToString());

    var staticResult = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, qtxt, fields, occur, stopAnalyzer);
    Assert.AreEqual(expectedRes, staticResult.ToString());
}
// Adds a keyword clause matching a creative's title and about text; blank
// input is a no-op.
public CreativeQuery WithKeywords(string keywords)
{
    if (string.IsNullOrEmpty(keywords))
    {
        return this;
    }

    var searchableFields = new[] { "Title", "About" };
    var parser = new MultiFieldQueryParser(
        Lucene.Net.Util.Version.LUCENE_29,
        searchableFields,
        new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29));
    this.AddQuery(parser.Parse(keywords));
    return this;
}
// Searches the "TestCatalog" Lucene index in Azure blob storage for type and
// return-type matches and returns up to 50 results as JSON.
public IActionResult Get(string searchTerm)
{
    if (string.IsNullOrEmpty(searchTerm))
    {
        return new JsonResult(null);
    }
    // Reject leading wildcards: they force a full term scan in Lucene.
    if (searchTerm.StartsWith("*") || searchTerm.StartsWith("?"))
    {
        return new JsonResult(null);
    }

    // Fall back to the development storage account when the configured
    // connection string does not parse. The original assigned the fallback
    // BEFORE TryParse, but an `out` argument is always overwritten — so on
    // parse failure the account ended up null instead of the fallback.
    CloudStorageAccount cloudStorageAccount;
    if (!CloudStorageAccount.TryParse(_options.ConnectionString, out cloudStorageAccount))
    {
        cloudStorageAccount = CloudStorageAccount.DevelopmentStorageAccount;
    }

    var azureDirectory = new AzureDirectory(cloudStorageAccount, "TestCatalog");
    var searcher = new IndexSearcher(azureDirectory);
    var analyzer = new NuGet.Indexing.IdentifierAnalyzer();
    var queryParser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, new[] { "Type", "ReturnType" }, analyzer);
    Query query = queryParser.Parse(searchTerm);

    //execute the query
    var hits = searcher.Search(query, 50);
    var packages = new List<SearchResult>();
    foreach (var hit in hits.ScoreDocs)
    {
        var doc = searcher.Doc(hit.Doc);
        var result = new SearchResult
        {
            FullTypeName = doc.GetField("Type").StringValue,
            PackageName = doc.GetField("Package").StringValue
        };
        // "Signature" and "ReturnType" are optional stored fields.
        if (doc.GetField("Signature") != null)
        {
            result.Signature = doc.GetField("Signature").StringValue;
        }
        if (doc.GetField("ReturnType") != null)
        {
            result.ReturnType = doc.GetField("ReturnType").StringValue;
        }
        packages.Add(result);
    }
    return new JsonResult(packages);
}
// verify parsing of query using a stopping analyzer
private void AssertStopQueryEquals(System.String qtxt, System.String expectedRes)
{
    var fields = new System.String[] { "b", "t" };
    var occur = new Occur[] { Occur.SHOULD, Occur.SHOULD };
    var stoppingAnalyzer = new TestQueryParser.QPTestAnalyzer();

    // Instance-based parse…
    var queryFromInstance = new MultiFieldQueryParser(fields, stoppingAnalyzer).Parse(qtxt);
    Assert.AreEqual(expectedRes, queryFromInstance.ToString());

    // …and the static overload must produce the same textual query.
    var queryFromStatic = MultiFieldQueryParser.Parse(qtxt, fields, occur, stoppingAnalyzer);
    Assert.AreEqual(expectedRes, queryFromStatic.ToString());
}
// verify parsing of query using a stopping analyzer
private void AssertStopQueryEquals(string qtxt, string expectedRes)
{
    var fields = new[] { "b", "t" };
    var occur = new[] { Occur.SHOULD, Occur.SHOULD };
    var analyzer = new TestQueryParser.QPTestAnalyzer();

    // The instance parser and the static Parse helper must agree.
    var instanceParser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, analyzer);
    Assert.AreEqual(expectedRes, instanceParser.Parse(qtxt).ToString());

    Query staticQuery = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, qtxt, fields, occur, analyzer);
    Assert.AreEqual(expectedRes, staticQuery.ToString());
}
public virtual void TestParsingQueryWithoutBoosts()
{
    // Only "f1" gets an explicit boost; "f2" must still parse, just unboosted.
    var analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
    var fields = new[] { "f1", "f2" };

    var boosts = new Dictionary<String, Single>();
    boosts.Add("f1", 2); // f2 deliberately has no boost entry

    var parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, analyzer, boosts);
    var query = parser.Parse("bazinga");

    Assert.AreEqual("f1:bazinga^2.0 f2:bazinga", query.ToString());
}
// Normalizes the query text, parses it with the class-level parser, and
// returns up to 1400 hits; appends progress to the Log field.
private TopDocs SearchIndex(string querytext)
{
    Log += "\r\nSearching for ' " + querytext + " ' ~ ' ";

    // Lower-case, then collapse every run of characters outside [0-9a-z.]
    // into a single space.
    querytext = Regex.Replace(querytext.ToLower(), @"[^0-9a-z.]+", " ");
    Log += querytext + " ' ";

    Query query = parser.Parse(querytext);
    TopDocs results = searcher.Search(query, 1400);
    Log += "\r\nNumber of results is " + results.TotalHits;
    return results;
}
public virtual void TestStaticMethod3Old()
{
    // queries[i] is parsed against fields[i] with flags[i]:
    // "one" on "b" (MUST), "two" on "t" (MUST_NOT).
    var queries = new[] { "one", "two" };
    var fields = new[] { "b", "t" };
    var flags = new[] { Occur.MUST, Occur.MUST_NOT };

    var q = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+b:one -t:two", q.ToString());

    // A flags array shorter than the fields array must be rejected.
    var shortFlags = new[] { Occur.MUST };
    Assert.Throws<ArgumentException>(
        () => MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, queries, fields, shortFlags,
            new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));
}
// Adds a keyword clause over title/description/assignedto, allowing leading
// wildcards and requiring a 1-character literal prefix before fuzzy matching.
public OnTimeItemQuery WithKeywords(string keywords)
{
    if (string.IsNullOrEmpty(keywords))
    {
        return this;
    }

    var searchedFields = new[] { "title", "description", "assignedto" };
    var parser = new MultiFieldQueryParser(
        Lucene.Net.Util.Version.LUCENE_29,
        searchedFields,
        new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29));
    parser.SetAllowLeadingWildcard(true);
    parser.SetFuzzyPrefixLength(1);
    this.AddQuery(parser.Parse(keywords));
    return this;
}
public virtual void TestStaticMethod2()
{
    // flags[i] applies to fields[i]: "b" terms required, "t" terms prohibited.
    var fields = new[] { "b", "t" };
    var flags = new[] { Occur.MUST, Occur.MUST_NOT };

    Query parsed = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+b:one -t:one", parsed.ToString());

    parsed = MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "one two", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+(b:one b:two) -(t:one t:two)", parsed.ToString());

    // A flags array shorter than the fields array must be rejected.
    var tooFewFlags = new[] { Occur.MUST };
    Assert.Throws<ArgumentException>(
        () => MultiFieldQueryParser.Parse(Util.Version.LUCENE_CURRENT, "blah", fields, tooFewFlags,
            new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));
}
// Legacy-API variant: verifies the static Parse(queries[], fields[]) overload
// pairs queries[i] with fields[i], rejects mismatched array lengths, and drops
// stop words (including fully-stopped sub-queries) without breaking structure.
public virtual void TestStaticMethod1()
{
    System.String[] fields = new System.String[] { "b", "t" };
    System.String[] queries = new System.String[] { "one", "two" };
    Query q = MultiFieldQueryParser.Parse(queries, fields, new StandardAnalyzer());
    Assert.AreEqual("b:one t:two", q.ToString());

    // Required terms keep their + operator inside the per-field group.
    System.String[] queries2 = new System.String[] { "+one", "+two" };
    q = MultiFieldQueryParser.Parse(queries2, fields, new StandardAnalyzer());
    Assert.AreEqual("(+b:one) (+t:two)", q.ToString());

    System.String[] queries3 = new System.String[] { "one", "+two" };
    q = MultiFieldQueryParser.Parse(queries3, fields, new StandardAnalyzer());
    Assert.AreEqual("b:one (+t:two)", q.ToString());

    System.String[] queries4 = new System.String[] { "one +more", "+two" };
    q = MultiFieldQueryParser.Parse(queries4, fields, new StandardAnalyzer());
    Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());

    // Fewer queries than fields must raise ArgumentException.
    System.String[] queries5 = new System.String[] { "blah" };
    try
    {
        q = MultiFieldQueryParser.Parse(queries5, fields, new StandardAnalyzer());
        Assert.Fail();
    }
    catch (System.ArgumentException e)
    {
        // expected exception, array length differs
    }

    // check also with stop words for this static form (qtxts[], fields[]).
    TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();

    // Queries consisting only of stop words collapse to an empty query.
    System.String[] queries6 = new System.String[] { "((+stop))", "+((stop))" };
    q = MultiFieldQueryParser.Parse(queries6, fields, stopA);
    Assert.AreEqual("", q.ToString());

    // Stop words are dropped from mixed queries without disturbing the rest.
    System.String[] queries7 = new System.String[] { "one ((+stop)) +more", "+((stop)) +two" };
    q = MultiFieldQueryParser.Parse(queries7, fields, stopA);
    Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());
}
/// <summary>
/// Exercises the deprecated static Parse(queries[], fields[], flags[], analyzer)
/// overload: one query per field with per-field Occur flags, and an
/// ArgumentException when the flags array length differs from the others.
/// </summary>
public virtual void TestStaticMethod3Old()
{
    System.String[] queries = new System.String[] { "one", "two" };
    System.String[] fields = new System.String[] { "b", "t" };
    BooleanClause.Occur[] flags = new BooleanClause.Occur[]
    {
        BooleanClause.Occur.MUST,
        BooleanClause.Occur.MUST_NOT
    };

    Query q = MultiFieldQueryParser.Parse(queries, fields, flags, new StandardAnalyzer());
    Assert.AreEqual("+b:one -t:two", q.ToString());

    try
    {
        BooleanClause.Occur[] flags2 = new BooleanClause.Occur[] { BooleanClause.Occur.MUST };
        q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
        Assert.Fail();
    }
    catch (System.ArgumentException) // FIX: dropped unused 'e' (CS0168 warning)
    {
        // expected exception, array length differs
    }
}
/// <summary>
/// Exercises the versioned static Parse(version, queries[], fields[], analyzer)
/// overload: each query string is parsed against the field at the same index,
/// mismatched array lengths throw, and stop words are removed.
/// </summary>
public virtual void TestStaticMethod1()
{
    var fields = new[] { "b", "t" };

    // One query per field: query[i] is parsed against fields[i].
    var pair = new[] { "one", "two" };
    Query parsed = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, pair, fields,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("b:one t:two", parsed.ToString());

    var bothRequired = new[] { "+one", "+two" };
    parsed = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, bothRequired, fields,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("(+b:one) (+t:two)", parsed.ToString());

    var secondRequired = new[] { "one", "+two" };
    parsed = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, secondRequired, fields,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("b:one (+t:two)", parsed.ToString());

    var mixed = new[] { "one +more", "+two" };
    parsed = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, mixed, fields,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("(b:one +b:more) (+t:two)", parsed.ToString());

    // Fewer queries than fields must be rejected.
    var tooFew = new[] { "blah" };
    Assert.Throws<ArgumentException>(
        () => MultiFieldQueryParser.Parse(
            Util.Version.LUCENE_CURRENT, tooFew, fields,
            new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));

    // check also with stop words for this static form (qtxts[], fields[]).
    var stopAnalyzer = new TestQueryParser.QPTestAnalyzer();

    var onlyStops = new[] { "((+stop))", "+((stop))" };
    parsed = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, onlyStops, fields, stopAnalyzer);
    Assert.AreEqual("", parsed.ToString());

    var stopsAndTerms = new[] { "one ((+stop)) +more", "+((stop)) +two" };
    parsed = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, stopsAndTerms, fields, stopAnalyzer);
    Assert.AreEqual("(b:one +b:more) (+t:two)", parsed.ToString());
}
/// <summary>
/// A stop word ("the") combined with the AND default operator must not
/// prevent the remaining term from matching the indexed document.
/// </summary>
public virtual void TestStopWordSearching()
{
    Analyzer analyzer = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
    Directory ramDir = new RAMDirectory();

    // Index a single document containing "footest".
    var writer = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    var doc = new Document();
    doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Close();

    var parser = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, new[] { "body" }, analyzer);
    parser.DefaultOperator = QueryParser.Operator.AND;
    var query = parser.Parse("the footest");

    var searcher = new IndexSearcher(ramDir, true);
    var hits = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits.Length);
    searcher.Close();
}
/// <summary>
/// Legacy-API variant: a stop word ("the") with AND as the default
/// operator must not prevent the remaining term from matching.
/// </summary>
public virtual void TestStopWordSearching()
{
    Analyzer analyzer = new StandardAnalyzer();
    Directory ramDir = new RAMDirectory();

    // Index one document containing "footest".
    IndexWriter writer = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.Add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Close();

    MultiFieldQueryParser parser = new MultiFieldQueryParser(new System.String[] { "body" }, analyzer);
    parser.SetDefaultOperator(QueryParser.Operator.AND);
    Query query = parser.Parse("the footest");

    IndexSearcher searcher = new IndexSearcher(ramDir);
    ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits.Length);
    searcher.Close();
}
/// <summary>
/// Old-style counterpart of TestStaticMethod2: the static Parse overload
/// with per-field Occur flags (MUST on "b", MUST_NOT on "t") and the
/// ArgumentException on a flags/fields length mismatch.
/// </summary>
public virtual void TestStaticMethod2Old()
{
    var fields = new[] { "b", "t" };
    var flags = new[] { Occur.MUST, Occur.MUST_NOT };
    // FIX: removed an unused MultiFieldQueryParser local (constructed but
    // never used) and stale commented-out code from the pre-Occur flags API.

    Query q = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, "one", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+b:one -t:one", q.ToString());

    q = MultiFieldQueryParser.Parse(
        Util.Version.LUCENE_CURRENT, "one two", fields, flags,
        new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());

    // Mismatched flags/fields lengths must be rejected.
    var flags2 = new[] { Occur.MUST };
    Assert.Throws<ArgumentException>(
        () => MultiFieldQueryParser.Parse(
            Util.Version.LUCENE_CURRENT, "blah", fields, flags2,
            new StandardAnalyzer(Util.Version.LUCENE_CURRENT)));
}
/// <summary>
/// Searches the full-text index for <paramref name="word"/> in the "title"
/// and "content" fields, optionally filtered by module code and up to three
/// extra wildcard parameters, and returns one page of highlighted results
/// sorted by the "date" field (newest first).
/// </summary>
/// <param name="modcode">Module-code filter; ignored when null or empty.</param>
/// <param name="word">Search keyword(s), parsed with the PanGu analyzer.</param>
/// <param name="pagesize">Records per page; a value &lt;= 0 disables paging.</param>
/// <param name="pageindex">1-based page number (used only when paging).</param>
/// <param name="searchparam1">Optional contains-filter on "searchparam1".</param>
/// <param name="searchparam2">Optional contains-filter on "searchparam2".</param>
/// <param name="searchparam3">Optional contains-filter on "searchparam3".</param>
/// <returns>A SearchResult with the total hit count and the requested page of rows.</returns>
public static SearchResult SearchContent(string modcode, string word, int pagesize, int pageindex, string searchparam1, string searchparam2, string searchparam3)
{
    SearchResult searchResult = new SearchResult();
    FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NativeFSLockFactory());
    IndexSearcher searcher = new IndexSearcher(directory, true);
    var analyzer = new PanGuAnalyzer();

    // MultiFieldQueryParser queries both columns at once.
    Lucene.Net.QueryParsers.MultiFieldQueryParser parser =
        new Lucene.Net.QueryParsers.MultiFieldQueryParser(
            Lucene.Net.Util.Version.LUCENE_29, new string[] { "title", "content" }, analyzer);
    // BUG FIX: the default operator must be set BEFORE Parse(); the original
    // assigned it after parsing, so the AND operator never took effect.
    parser.DefaultOperator = QueryParser.AND_OPERATOR;
    Lucene.Net.Search.Query query = parser.Parse(word);

    Lucene.Net.Search.BooleanQuery boolQuery = new Lucene.Net.Search.BooleanQuery();
    boolQuery.Add(query, Occur.MUST);
    if (!string.IsNullOrEmpty(modcode))
    {
        PhraseQuery queryModCode = new PhraseQuery();
        queryModCode.Add(new Term("modcode", modcode));
        boolQuery.Add(queryModCode, Occur.MUST);
    }
    // Each optional parameter becomes a required "contains" wildcard clause.
    if (!string.IsNullOrEmpty(searchparam1))
    {
        boolQuery.Add(new WildcardQuery(new Term("searchparam1", "*" + searchparam1 + "*")), Occur.MUST);
    }
    if (!string.IsNullOrEmpty(searchparam2))
    {
        boolQuery.Add(new WildcardQuery(new Term("searchparam2", "*" + searchparam2 + "*")), Occur.MUST);
    }
    if (!string.IsNullOrEmpty(searchparam3))
    {
        boolQuery.Add(new WildcardQuery(new Term("searchparam3", "*" + searchparam3 + "*")), Occur.MUST);
    }

    // Newest first by the string-typed "date" field; cap at 1000 hits.
    Sort sort = new Sort(new SortField("date", SortField.STRING, true));
    var result = searcher.Search(boolQuery, null, 1000, sort);

    if (result.TotalHits == 0)
    {
        searchResult.count = 0;
    }
    else
    {
        searchResult.count = result.TotalHits;
        int startNum = 0, endNum = result.TotalHits;
        if (pagesize > 0)
        {
            // When pagesize > 0, slice out the requested page.
            startNum = (pageindex - 1) * pagesize;
            endNum = startNum + pagesize;
        }

        // PERF FIX: the highlighter is loop-invariant — build it once
        // instead of once per hit (the original rebuilt it in the loop).
        // HighLightTag holds prefix|suffix for the highlighted term.
        string highLightTag = Util.GetAppSetting("HighLightTag", "<font color=\"red\">|</font>");
        string[] tarArr = highLightTag.Split('|');
        var simpleHTMLFormatter = new SimpleHTMLFormatter(tarArr[0], tarArr[1]);
        var highlighter = new Highlighter(simpleHTMLFormatter, new PanGu.Segment());
        // Characters per highlighted fragment.
        highlighter.FragmentSize = Util.GetAppSetting("HighlightFragmentSize", "100").ToInt();

        ScoreDoc[] docs = result.ScoreDocs;
        List<JObject> dataList = new List<JObject>();
        for (int i = 0; i < docs.Length; i++)
        {
            if (i < startNum) { continue; }
            if (i >= endNum) { break; }

            Document doc = searcher.Doc(docs[i].Doc);
            // FIX: doc.Get already returns a string; the original's redundant
            // .ToString() calls would throw NullReferenceException whenever a
            // stored field was missing.
            string title = doc.Get("title");
            string content = doc.Get("content");

            JObject obj = new JObject();
            obj["id"] = doc.Get("id");

            // Best-matching fragments; fall back to the raw title when the
            // highlighter finds no match in it.
            string bodyPreview = highlighter.GetBestFragment(word, content);
            string newTitle = highlighter.GetBestFragment(word, title);
            if (!string.IsNullOrEmpty(newTitle))
            {
                title = newTitle;
            }
            obj["title"] = title;
            obj["content"] = bodyPreview;
            obj["date"] = doc.Get("date");
            obj["param"] = doc.Get("param");
            obj["modcode"] = doc.Get("modcode");
            obj["searchparam1"] = doc.Get("searchparam1");
            obj["searchparam2"] = doc.Get("searchparam2");
            obj["searchparam3"] = doc.Get("searchparam3");
            dataList.Add(obj);
        }
        searchResult.data = dataList;
    }

    analyzer.Close();
    searcher.Dispose();
    directory.Dispose();
    return (searchResult);
}
/// <summary>
/// Checks how the deprecated two-arg MultiFieldQueryParser expands
/// unqualified query syntax across the "b" and "t" fields, leaves
/// field-qualified terms alone, and behaves under the AND operator.
/// </summary>
public virtual void TestSimple()
{
    System.String[] fields = new System.String[] { "b", "t" };
    MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer());

    // Plain terms expand to one disjunction per field.
    Query q = parser.Parse("one");
    Assert.AreEqual("b:one t:one", q.ToString());
    q = parser.Parse("one two");
    Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());

    // Unary +/- operators apply to the whole per-term group.
    q = parser.Parse("+one +two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = parser.Parse("+one -two -three");
    Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());

    // Boost and fuzzy modifiers.
    q = parser.Parse("one^2 two");
    Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());
    q = parser.Parse("one~ two");
    Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());
    q = parser.Parse("one~0.8 two^2");
    Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());

    // Wildcards and ranges.
    q = parser.Parse("one* two*");
    Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());
    q = parser.Parse("[a TO c] two");
    Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());
    q = parser.Parse("w?ldcard");
    Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());

    // Phrases, with and without slop.
    q = parser.Parse("\"foo bar\"");
    Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());
    q = parser.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());
    q = parser.Parse("\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());

    // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
    q = parser.Parse("b:\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4", q.ToString());

    // Terms that already name a field must be left untouched.
    q = parser.Parse("one f:two");
    Assert.AreEqual("(b:one t:one) f:two", q.ToString());

    // AND mode: every per-term group becomes required.
    parser.SetDefaultOperator(QueryParser.AND_OPERATOR);
    q = parser.Parse("one two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = parser.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
}