/// <summary>
/// Searches the article index for <paramref name="strQuery"/> against the
/// "ArticleDetail" field and returns an HTML fragment listing every hit with a
/// highlighted snippet.
/// </summary>
/// <param name="strQuery">Raw Lucene query text typed by the user.</param>
/// <returns>HTML markup with the hit count and one entry per matching article.</returns>
public string Search(string strQuery)
{
    // StringBuilder instead of repeated string concatenation inside the loop.
    System.Text.StringBuilder result = new System.Text.StringBuilder();
    Lucene.Net.Index.IndexReader reader = Lucene.Net.Index.IndexReader.Open(Server.MapPath(System.Configuration.ConfigurationManager.AppSettings["IndexingArticle"]));
    try
    {
        // One analyzer for both parsing and highlighting (the original allocated
        // a fresh StandardAnalyzer for every hit in the loop).
        Lucene.Net.Analysis.Standard.StandardAnalyzer analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer();
        Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("ArticleDetail", analyzer);
        Lucene.Net.Search.Query query = parser.Parse(strQuery);
        Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(reader);
        try
        {
            Lucene.Net.Search.Hits hits = searcher.Search(query);
            Lucene.Net.Highlight.QueryScorer score = new Lucene.Net.Highlight.QueryScorer(query);
            Lucene.Net.Highlight.SimpleHTMLFormatter formater = new Lucene.Net.Highlight.SimpleHTMLFormatter("<span class='Highlight'>", "</span>");
            Lucene.Net.Highlight.Highlighter highlighter = new Lucene.Net.Highlight.Highlighter(formater, score);
            result.Append("<div align='right' style='background-color:#F0F7F9; padding-right:15px' height='30px'><font style='FONT-WEIGHT: bold; FONT-SIZE: 10pt; COLOR: #005482; FONT-FAMILY: arial'>Kết quả tìm thấy : " + hits.Length() + " </font></div>");
            result.Append("<div style='padding: 10px 10px 10px 10px;'>");
            for (int i = 0; i < hits.Length(); i++)
            {
                string id = hits.Doc(i).Get("ArticleId");
                string title = hits.Doc(i).Get("ArticleTitle");
                string detail = hits.Doc(i).Get("ArticleDetail");
                Lucene.Net.Analysis.TokenStream ts = analyzer.TokenStream("ArticleDetail", new System.IO.StringReader(detail));
                result.Append(string.Format("<div align='left'><font style='FONT-WEIGHT: bold; FONT-SIZE: 10pt; COLOR: #5b5b5b; FONT-FAMILY: arial'><a href='/?ArticleId={0}'>{1}</a></font>", id, title));
                // "<br />" fixes the invalid "</br>" tag the original emitted.
                result.Append(string.Format("<div align='left'><font style='FONT-SIZE: 9pt' face='Arial' color='#005482'>...{0}...</font></div></div><br />", highlighter.GetBestFragment(ts, detail)));
            }
            result.Append("</div>");
        }
        finally
        {
            // A searcher constructed from an IndexReader does not close that
            // reader when closed, so both must be closed explicitly.
            searcher.Close();
        }
    }
    finally
    {
        reader.Close();
    }
    return result.ToString();
}
/// <summary>
/// Builds "extension" facets for the given query, one FacetReturn per file
/// extension found in the index. Returns an empty list when the location
/// filter is not in the available locations.
/// </summary>
public List <FacetReturn> Filter(Lucene.Net.Search.Query query, List <Util.SearchStringModel> searchQuery, string locationFilter, System.Collections.BitArray baseQuery)
{
    if (!InAvailableLocations(locationFilter))
    {
        return new List <FacetReturn>();
    }

    var stopWatch = new Stopwatch();
    // Start before logging: the original read ElapsedMilliseconds before
    // calling Start(), so the "Start" entry always reported 0ms.
    stopWatch.Start();
    if (Config.EnableBucketDebug || Sitecore.ItemBucket.Kernel.Util.Constants.EnableTemporaryBucketDebug)
    {
        Diagnostics.Log.Info("Start Extension Facet took : " + stopWatch.ElapsedMilliseconds + "ms", this);
    }

    // Materialize immediately: Select() is deferred, so without ToList() here
    // the facet projection would actually run AFTER the stopwatch was stopped,
    // making the logged timing meaningless.
    var returnFacets = this.GetSearch(query, GetFileExtensionsFromIndex().ToList(), searchQuery, locationFilter, baseQuery)
        .Select(facet => new FacetReturn
        {
            KeyName = facet.Key,
            Value = facet.Value.ToString(),
            Type = "extension",
            ID = facet.Key
        })
        .ToList();

    if (Config.EnableBucketDebug || Sitecore.ItemBucket.Kernel.Util.Constants.EnableTemporaryBucketDebug)
    {
        stopWatch.Stop();
        Diagnostics.Log.Info("End Extension Facet took : " + stopWatch.ElapsedMilliseconds + "ms", this);
    }

    return returnFacets;
}
/// <summary> Create a CustomScoreQuery over input subQuery and a {@link ValueSourceQuery}.</summary>
/// <param name="subQuery">the sub query whose score is being customized. Must not be null.
/// </param>
/// <param name="valSrcQueries">value source queries whose scores are used in the custom score
/// computation. For most simple/convenient use case these would be
/// {@link Lucene.Net.Search.Function.FieldScoreQuery FieldScoreQueries}.
/// This parameter is optional - it can be null or even an empty array.
/// </param>
public CustomScoreQuery(Query subQuery, ValueSourceQuery[] valSrcQueries)
{
    // Validate before assigning any field so a failed construction leaves
    // no partial state (the original assigned first, then threw).
    if (subQuery == null)
        throw new System.ArgumentException("<subquery> must not be null!");
    this.subQuery = subQuery;
    // null is normalized to an empty array so callers never see a null field.
    this.valSrcQueries = valSrcQueries != null ? valSrcQueries : new ValueSourceQuery[0];
}
// Verifies that the custom field/string bridges declared on Cloud are applied
// at index time: querying the bridged token ("This") finds the entity, while
// tokens that only exist in the raw, un-bridged value ("by", "is") must not.
public void CustomBridges()
{
    Cloud cloud = new Cloud();
    cloud.CustomFieldBridge = ("This is divided by 2");
    cloud.CustomStringBridge = ("This is div by 4");
    ISession s = OpenSession();
    ITransaction tx = s.BeginTransaction();
    s.Save(cloud);
    s.Flush();
    tx.Commit();

    // New transaction for the query phase.
    tx = s.BeginTransaction();
    IFullTextSession session = Search.CreateFullTextSession(s);
    QueryParser parser = new QueryParser("id", new SimpleAnalyzer());
    Lucene.Net.Search.Query query = parser.Parse("CustomFieldBridge:This AND CustomStringBridge:This");
    IList result = session.CreateFullTextQuery(query).List();
    Assert.AreEqual(1, result.Count, "Properties not mapped");

    // Raw (pre-bridge) tokens must not be searchable once the bridge applied.
    query = parser.Parse("CustomFieldBridge:by AND CustomStringBridge:is");
    result = session.CreateFullTextQuery(query).List();
    Assert.AreEqual(0, result.Count, "Custom types not taken into account");

    // Cleanup: remove the persisted entity and close out the session.
    s.Delete(s.Get(typeof(Cloud), cloud.Id));
    tx.Commit();
    s.Close();
}
/// <summary>
/// Builds a field query. When no explicit field is given, the query text is
/// expanded across every configured field and the per-field queries are OR'ed
/// together, applying the requested slop to any (multi-)phrase query produced.
/// </summary>
protected internal override Query GetFieldQuery(System.String field, System.String queryText, int slop)
{
    // An explicit field bypasses the multi-field expansion entirely.
    if (field != null)
    {
        return base.GetFieldQuery(field, queryText);
    }

    System.Collections.ArrayList disjuncts = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
    foreach (System.String searchField in fields)
    {
        Query fieldQuery = base.GetFieldQuery(searchField, queryText);
        if (fieldQuery == null)
        {
            continue;
        }
        if (fieldQuery is PhraseQuery)
        {
            ((PhraseQuery) fieldQuery).SetSlop(slop);
        }
        if (fieldQuery is MultiPhraseQuery)
        {
            ((MultiPhraseQuery) fieldQuery).SetSlop(slop);
        }
        disjuncts.Add(new BooleanClause(fieldQuery, BooleanClause.Occur.SHOULD));
    }

    // Every field expansion produced nothing — e.g. the text was all stopwords.
    if (disjuncts.Count == 0)
    {
        return null;
    }
    return GetBooleanQuery(disjuncts, true);
}
/// <summary>
/// Searches the "body" field of the on-disk index at C:\lucene and returns the
/// ids of the documents whose score exceeds <paramref name="minScore"/>.
/// Builds the indexes lazily on first use.
/// </summary>
/// <param name="search">Lucene query text.</param>
/// <param name="max">Maximum number of hits to retrieve.</param>
/// <param name="minScore">Score a hit must exceed (strictly) to be returned.</param>
public IEnumerable <int> Get(string search, int max = 100, int minScore = 1)
{
    if (!built)
    {
        BuildIndexes();
        built = true;
    }

    var dir = FSDirectory.Open(new System.IO.DirectoryInfo(@"C:\lucene"));
    var analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
    var parser = new Lucene.Net.QueryParsers.QueryParser(Lucene.Net.Util.Version.LUCENE_30, "body", analyzer);
    Lucene.Net.Search.Query query = parser.Parse(search);

    // Dispose the searcher (the original leaked it and its useless
    // `catch { throw; }` added nothing). Results are materialized BEFORE
    // disposal because the deferred Select would otherwise touch a disposed
    // searcher. Dropping the original's unused `searcher.Doc(ScoreDocs[0]...)`
    // also removes a crash when the search returned no hits.
    using (var searcher = new Lucene.Net.Search.IndexSearcher(dir))
    {
        var hits = searcher.Search(query, max);
        return hits.ScoreDocs
            .Where(s => s.Score > minScore)
            .Select(h => int.Parse(searcher.Doc(h.Doc).GetField("id").StringValue))
            .ToList();
    }
}
/// <summary> Parses a query, searching on the fields specified. Use this if you need to
/// specify certain fields as required, and others as prohibited.
/// <p>
/// <pre>
/// Usage:
/// <code>
/// String[] fields = { "filename", "contents", "description" };
/// int[] flags = { MultiFieldQueryParser.NORMAL_FIELD,
/// MultiFieldQueryParser.REQUIRED_FIELD,
/// MultiFieldQueryParser.PROHIBITED_FIELD, };
/// parse(query, fields, flags, analyzer);
/// </code>
/// </pre>
///
/// <p>
/// The code above would construct a query:
/// <pre>
/// <code>
/// (filename:query1) +(contents:query2) -(description:query3)
/// </code>
/// </pre>
///
/// </summary>
/// <param name="queries">Queries string to parse
/// </param>
/// <param name="fields">Fields to search on
/// </param>
/// <param name="flags">Flags describing the fields
/// </param>
/// <param name="analyzer">Analyzer to use
/// </param>
/// <throws> ParseException if query parsing fails </throws>
/// <throws> TokenMgrError if query parsing fails </throws>
/// <throws> IllegalArgumentException if the length of the queries, fields, and flags array differ </throws>
/// <deprecated> use {@link #Parse(String[], String[], BooleanClause.Occur[], Analyzer)} instead
/// </deprecated>
public static Query Parse(System.String[] queries, System.String[] fields, int[] flags, Analyzer analyzer)
{
    if (!(queries.Length == fields.Length && queries.Length == flags.Length))
    {
        // fixed: the message previously read "have have different length"
        throw new System.ArgumentException("queries, fields, and flags array have different length");
    }
    BooleanQuery bQuery = new BooleanQuery();
    for (int i = 0; i < fields.Length; i++)
    {
        // Each query string is parsed independently against its own field.
        QueryParser qp = new QueryParser(fields[i], analyzer);
        Query q = qp.Parse(queries[i]);
        int flag = flags[i];
        switch (flag)
        {
            case REQUIRED_FIELD:
                bQuery.Add(q, BooleanClause.Occur.MUST);
                break;

            case PROHIBITED_FIELD:
                bQuery.Add(q, BooleanClause.Occur.MUST_NOT);
                break;

            default:
                // NORMAL_FIELD and anything unrecognized become optional clauses.
                bQuery.Add(q, BooleanClause.Occur.SHOULD);
                break;
        }
    }
    return bQuery;
}
/// <summary>
/// Runs the base query inside a query-trace scope so its execution is traced;
/// all paging/sorting arguments are forwarded unchanged.
/// </summary>
public override List <SkinnyItem> RunQuery(Lucene.Net.Search.Query query, bool showAllVersions, string sortField, bool reverse, int start, int end, out int totalResults)
{
    using (QueryTraceHelper.GetQueryTraceScope(query))
    {
        return base.RunQuery(query, showAllVersions, sortField, reverse, start, end, out totalResults);
    }
}
/// <summary>
/// Traced variant of RunQuery with default arguments; wraps the base call in a
/// query-trace scope and forwards every argument unchanged.
/// </summary>
public override List <SkinnyItem> RunQuery(Lucene.Net.Search.Query query, bool showAllVersions = false, string sortField = "", bool reverse = true, int start = 0, int end = 0)
{
    using (QueryTraceHelper.GetQueryTraceScope(query))
    {
        return base.RunQuery(query, showAllVersions, sortField, reverse, start, end);
    }
}
//fieldname MUST be interned prior to this call
// Collects WeightedTerms for the given query into `terms` (the Hashtable is
// used as a set: each WeightedTerm is both key and value). Boolean and
// filtered queries are delegated to specialized helpers; anything else is
// flattened through Query.ExtractTerms.
private static void GetTerms(Query query, System.Collections.Hashtable terms, bool prohibited, System.String fieldName)
{
    try
    {
        if (query is BooleanQuery)
        {
            GetTermsFromBooleanQuery((BooleanQuery) query, terms, prohibited, fieldName);
        }
        else if (query is FilteredQuery)
        {
            GetTermsFromFilteredQuery((FilteredQuery) query, terms, prohibited, fieldName);
        }
        else
        {
            System.Collections.Hashtable nonWeightedTerms = new System.Collections.Hashtable();
            query.ExtractTerms(nonWeightedTerms);
            System.Collections.IDictionaryEnumerator iter = nonWeightedTerms.GetEnumerator();
            while (iter.MoveNext())
            {
                Term term = (Term) iter.Value;
                // null fieldName means "accept all fields"; otherwise keep only
                // terms of the requested field (see interning note above).
                if ((fieldName == null) || (term.Field() == fieldName))
                {
                    // Weight each term by the query's boost.
                    WeightedTerm temp = new WeightedTerm(query.GetBoost(), term.Text());
                    terms.Add(temp, temp);
                }
            }
        }
    }
    catch (System.NotSupportedException ignore)
    {
        //this is non-fatal for our purposes
        // (some Query subclasses do not implement ExtractTerms)
    }
}
// Exercises the deprecated static MultiFieldQueryParser.Parse overload that
// takes one flag per field: MUST on "b", MUST_NOT on "t". Also checks that a
// flags array shorter than the fields array is rejected.
public virtual void TestStaticMethod2Old()
{
    System.String[] fields = new System.String[] { "b", "t" };
    BooleanClause.Occur[] flags = new BooleanClause.Occur[] { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
    // (removed: an unused MultiFieldQueryParser local that was never queried)

    Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer());
    Assert.AreEqual("+b:one -t:one", q.ToString());

    q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer());
    Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());

    try
    {
        // flags2 has fewer entries than fields — Parse must reject it.
        BooleanClause.Occur[] flags2 = new BooleanClause.Occur[] { BooleanClause.Occur.MUST };
        q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
        Assert.Fail();
    }
    catch (System.ArgumentException)
    {
        // expected exception, array length differs
    }
}
// Handles the Find button: empty input is ignored; the magic input
// "cmd:thongke" lists all indexed file names; anything else is run as a
// Lucene query and rendered into the embedded browser.
private void BtnFind_Click(object sender, EventArgs e)
{
    if (string.IsNullOrEmpty(TxtWhere.Text))
    {
        return;
    }
    if (string.IsNullOrEmpty(TxtWhere.Text.Trim()))
    {
        // Whitespace-only input: clear the box and bail out.
        TxtWhere.Clear();
        return;
    }

    LuceneSearch search = new LuceneSearch();
    try
    {
        if ("cmd:thongke" == TxtWhere.Text)
        {
            webBrowser1.DocumentText = search.GetAllDoc();
            toolStripStatusLabel1.Text = " All file names";
        }
        else
        {
            Lucene.Net.Search.Query query = search.GetTextQuery(TxtWhere.Text.Trim());
            if (query != null)
            {
                lst = search.Search(query, MyConfig.GetMaxFindResult());
            }
            // Guard against lst being null when the very first query is unparsable
            // (the original dereferenced it unconditionally).
            toolStripStatusLabel1.Text = string.Format(" Found {0} files", lst != null ? lst.Count : 0);
            webBrowser1.DocumentText = MyExt.ToShortHtml(lst);
        }
    }
    finally
    {
        // Release the index even when the search throws (the original leaked it).
        search.LuceneDispose();
    }
}
// Verifies MultiFieldQueryParser expansion of un-fielded query text across the
// configured fields ("b" and "t"): plain terms, +/- operators, boosts, fuzzy,
// wildcard, range, phrase and slop syntax, explicit-field queries, and the
// AND default operator.
public virtual void TestSimple()
{
    System.String[] fields = new System.String[] { "b", "t" };
    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer());

    Query q = mfqp.Parse("one");
    Assert.AreEqual("b:one t:one", q.ToString());
    q = mfqp.Parse("one two");
    Assert.AreEqual("(b:one t:one) (b:two t:two)", q.ToString());
    q = mfqp.Parse("+one +two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("+one -two -three");
    Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());

    // Boost, fuzzy and wildcard syntax must survive the multi-field expansion.
    q = mfqp.Parse("one^2 two");
    Assert.AreEqual("((b:one t:one)^2.0) (b:two t:two)", q.ToString());
    q = mfqp.Parse("one~ two");
    Assert.AreEqual("(b:one~0.5 t:one~0.5) (b:two t:two)", q.ToString());
    q = mfqp.Parse("one~0.8 two^2");
    Assert.AreEqual("(b:one~0.8 t:one~0.8) ((b:two t:two)^2.0)", q.ToString());
    q = mfqp.Parse("one* two*");
    Assert.AreEqual("(b:one* t:one*) (b:two* t:two*)", q.ToString());
    q = mfqp.Parse("[a TO c] two");
    Assert.AreEqual("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.ToString());
    q = mfqp.Parse("w?ldcard");
    Assert.AreEqual("b:w?ldcard t:w?ldcard", q.ToString());

    // Phrases and phrase slop.
    q = mfqp.Parse("\"foo bar\"");
    Assert.AreEqual("b:\"foo bar\" t:\"foo bar\"", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.ToString());
    q = mfqp.Parse("\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4 t:\"foo bar\"~4", q.ToString());

    // LUCENE-1213: MultiFieldQueryParser was ignoring slop when phrase had a field.
    q = mfqp.Parse("b:\"foo bar\"~4");
    Assert.AreEqual("b:\"foo bar\"~4", q.ToString());

    // make sure that terms which have a field are not touched:
    q = mfqp.Parse("one f:two");
    Assert.AreEqual("(b:one t:one) f:two", q.ToString());

    // AND mode:
    mfqp.SetDefaultOperator(QueryParser.AND_OPERATOR);
    q = mfqp.Parse("one two");
    Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
    Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
}
/// <summary> Collects the payloads matched by the given query.
/// The query should be rewritten (for wildcard/fuzzy support) before being passed in.
/// </summary>
/// <param name="query">the (ideally rewritten) query
/// </param>
/// <param name="state">reader state threaded through the span traversal
/// </param>
/// <returns> the collected payload byte arrays
/// </returns>
/// <throws> IOException </throws>
public virtual ICollection <byte[]> GetPayloadsForQuery(Query query, IState state)
{
    var collected = new List<byte[]>();
    QueryToSpanQuery(query, collected, state);
    return collected;
}
/// <summary>
/// End-to-end smoke test: index one document in a RAM directory, search it back
/// through the query parser, and verify the stored field round-trips intact.
/// </summary>
public virtual void TestDemo_Renamed()
{
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);

    // In-memory index; use FSDirectory.open("/tmp/testindex") to persist instead.
    Directory directory = new RAMDirectory();

    IndexWriter writer = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
    System.String text = "This is the text to be indexed.";
    Document doc = new Document();
    doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Close();

    // Query the freshly built index through a read-only searcher.
    IndexSearcher searcher = new IndexSearcher(directory, true);
    QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fieldname", analyzer);
    Query query = parser.Parse("text");
    ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits.Length);

    // Every hit must carry the original stored text.
    foreach (ScoreDoc hit in hits)
    {
        Document hitDoc = searcher.Doc(hit.Doc);
        Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
    }
    searcher.Close();
    directory.Close();
}
/// <summary>
/// Search for files.
/// </summary>
/// <param name="queryText">The query text, parsed against the "body" field.</param>
/// <returns>The files that match the query text.</returns>
public SourceFile[] Search(string queryText)
{
    var parser = new Lucene.Net.QueryParsers.QueryParser(
        Lucene.Net.Util.Version.LUCENE_30, "body", _analyzer);
    Lucene.Net.Search.Query parsedQuery = parser.Parse(queryText);

    using (var indexSearcher = new Lucene.Net.Search.IndexSearcher(_directory, true))
    {
        Lucene.Net.Search.TopDocs topDocs = indexSearcher.Search(parsedQuery, int.MaxValue);
        var matches = new List<SourceFile>();
        foreach (Lucene.Net.Search.ScoreDoc scoreDoc in topDocs.ScoreDocs)
        {
            Lucene.Net.Documents.Document document = indexSearcher.Doc(scoreDoc.Doc);
            matches.Add(new SourceFile(
                document.Get("id"),
                document.Get("type"),
                document.Get("name"),
                document.Get("fileName"),
                null));
        }
        return matches.ToArray();
    }
}
/*(non-Javadoc) <see cref="Lucene.Net.Search.Query.rewrite(Lucene.Net.Index.IndexReader) */
// Rewrites the sub query and every value-source query; returns `this` when
// nothing changed, otherwise a clone holding the rewritten parts.
public override Query Rewrite(IndexReader reader)
{
    CustomScoreQuery clone = null;
    Query sq = subQuery.Rewrite(reader);
    if (sq != subQuery)
    {
        clone = (CustomScoreQuery) Clone();
        clone.subQuery = sq;
    }
    // Clone() is assumed shallow (MemberwiseClone-style), so the clone shares
    // this query's valSrcQueries array; copy it before the first in-place write
    // or the assignment below would mutate the ORIGINAL query's array too.
    bool arrayCopied = false;
    for (int i = 0; i < valSrcQueries.Length; i++)
    {
        ValueSourceQuery v = (ValueSourceQuery) valSrcQueries[i].Rewrite(reader);
        if (v != valSrcQueries[i])
        {
            if (clone == null)
            {
                clone = (CustomScoreQuery) Clone();
            }
            if (!arrayCopied)
            {
                clone.valSrcQueries = (ValueSourceQuery[]) valSrcQueries.Clone();
                arrayCopied = true;
            }
            clone.valSrcQueries[i] = v;
        }
    }
    return (clone == null) ? this : clone;
}
} // End Sub BuildIndex


// https://lucenenet.apache.org/
// https://www.codeproject.com/Articles/609980/Small-Lucene-NET-Demo-App
// https://stackoverflow.com/questions/12600196/lucene-how-to-index-file-names

/// <summary>
/// Escapes <paramref name="phrase"/>, searches it against the "file_name" field
/// of the index at <paramref name="indexPath"/>, and prints score + full_name
/// for the top 10 hits.
/// </summary>
private static void SearchPath(string phrase, string indexPath)
{
    Lucene.Net.Util.LuceneVersion version = Lucene.Net.Util.LuceneVersion.LUCENE_48;

    // Dispose the directory and reader deterministically (the original leaked both).
    using (Lucene.Net.Store.Directory luceneIndexDirectory = Lucene.Net.Store.FSDirectory.Open(indexPath))
    using (Lucene.Net.Index.IndexReader r = Lucene.Net.Index.DirectoryReader.Open(luceneIndexDirectory))
    {
        Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(r);
        // NOTE(review): the analyzer comes from GetWrappedAnalyzer(); not disposed
        // here in case the factory shares instances — confirm ownership.
        Lucene.Net.Analysis.Analyzer analyzer = GetWrappedAnalyzer();
        Lucene.Net.QueryParsers.Classic.QueryParser parser = new Lucene.Net.QueryParsers.Classic.QueryParser(version, "file_name", analyzer);

        // To search across all fields instead, see
        // https://stackoverflow.com/questions/15170097/how-to-search-across-all-the-fields
        // Lucene.Net.QueryParsers.Classic.MultiFieldQueryParser parser = new Lucene.Net.QueryParsers.Classic.MultiFieldQueryParser(version, GetFields(r), analyzer);

        Lucene.Net.Search.Query query = parser.Parse(Lucene.Net.QueryParsers.Classic.QueryParser.Escape(phrase));
        Lucene.Net.Search.ScoreDoc[] hits = searcher.Search(query, 10).ScoreDocs;

        foreach (Lucene.Net.Search.ScoreDoc hit in hits)
        {
            Lucene.Net.Documents.Document foundDoc = searcher.Doc(hit.Doc);
            System.Console.WriteLine(hit.Score);
            string full_name = foundDoc.Get("full_name");
            System.Console.WriteLine(full_name);

            // string favoritePhrase = foundDoc.Get("favoritePhrase");
            // System.Console.WriteLine(favoritePhrase);
        } // Next hit
    }
} // End Sub SearchPath
/// <summary>
/// Execute a Lucene query and retrieve managed objects of type entities (or their indexed subclasses).
/// If entities is empty, include all indexed entities.
/// </summary>
/// <param name="luceneQuery">the raw Lucene query to execute</param>
/// <param name="entities">entities must be immutable for the lifetime of the query object</param>
/// <returns>a full-text query bound to the current session</returns>
public IFullTextQuery CreateFullTextQuery(Lucene.Net.Search.Query luceneQuery, params System.Type[] entities)
{
    using (new SessionIdLoggingContext(sessionImplementor.SessionId))
    {
        var fullTextQuery = new FullTextQueryImpl(luceneQuery, entities, session, null);
        return fullTextQuery;
    }
}
/// <summary>
/// Collects the payloads matched by the given query.
/// The query should be rewritten (for wildcard/fuzzy support) before being passed in.
///
/// @param query the (ideally rewritten) query
/// @return payloads Collection
/// @throws IOException
/// </summary>
public ICollection <byte[]> GetPayloadsForQuery(Query query)
{
    var collected = new List<byte[]>();
    QueryToSpanQuery(query, collected);
    return collected;
}
// Verifies per-field boosts supplied to MultiFieldQueryParser ("b" boosted 5x,
// "t" boosted 10x) across plain terms, AND/OR operators, explicit fields and
// query-level boosts (which multiply on top of the field boosts).
public virtual void TestBoostsSimple()
{
    System.Collections.IDictionary boosts = new System.Collections.Hashtable();
    boosts["b"] = (float) 5;
    boosts["t"] = (float) 10;
    System.String[] fields = new System.String[] { "b", "t" };
    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer(), boosts);

    //Check for simple
    Query q = mfqp.Parse("one");
    Assert.AreEqual("b:one^5.0 t:one^10.0", q.ToString());

    //Check for AND
    q = mfqp.Parse("one AND two");
    Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.ToString());

    //Check for OR
    q = mfqp.Parse("one OR two");
    Assert.AreEqual("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.ToString());

    //Check for AND and a field
    q = mfqp.Parse("one AND two AND foo:test");
    Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.ToString());

    // Query-level ^boosts compose with the per-field boosts.
    q = mfqp.Parse("one^3 AND two^4");
    Assert.AreEqual("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.ToString());
}
// Same per-field-boost checks as the legacy variant, but using the generic
// Dictionary overload and the version-aware MultiFieldQueryParser constructor.
public virtual void TestBoostsSimple()
{
    IDictionary <string, float> boosts = new Dictionary <string, float>();
    boosts["b"] = (float) 5;
    boosts["t"] = (float) 10;
    string[] fields = new string[] { "b", "t" };
    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, fields, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), boosts);

    //Check for simple
    Query q = mfqp.Parse("one");
    Assert.AreEqual("b:one^5.0 t:one^10.0", q.ToString());

    //Check for AND
    q = mfqp.Parse("one AND two");
    Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.ToString());

    //Check for OR
    q = mfqp.Parse("one OR two");
    Assert.AreEqual("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.ToString());

    //Check for AND and a field
    q = mfqp.Parse("one AND two AND foo:test");
    Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.ToString());

    // Query-level ^boosts compose with the per-field boosts.
    q = mfqp.Parse("one^3 AND two^4");
    Assert.AreEqual("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.ToString());
}
/// <summary>
/// Runs a "dimensions" facet over the bucket index for the given query and
/// filters, returning facet-key to hit-count pairs.
/// </summary>
public Dictionary <string, int> GetSearch(Lucene.Net.Search.Query query, List <string> filter, List <Util.SearchStringModel> searchQuery, string locationFilter, System.Collections.BitArray baseQuery)
{
    using (var searcher = new IndexSearcher(ItemBucket.Kernel.Util.Constants.Index.Name))
    {
        return searcher.RunFacet(query, false, false, 0, 0, "dimensions", filter, baseQuery, locationFilter);
    }
}
/// <summary>
/// Copies the shared state (anonymous-class style capture) into the
/// corresponding fields of this helper instance.
/// </summary>
private void InitBlock(int[] lastDoc, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s, float maxDiff, IndexReader[] lastReader)
{
    this.lastReader = lastReader;
    this.maxDiff = maxDiff;
    this.s = s;
    this.q = q;
    this.lastDoc = lastDoc;
}
/// <summary>
/// Captures the base query plus the drill-down/sideways collectors and queries
/// used during drill-sideways execution; all arguments are stored as-is.
/// </summary>
internal DrillSidewaysQuery(Query baseQuery, Collector drillDownCollector, Collector[] drillSidewaysCollectors, Query[] drillDownQueries, bool scoreSubDocsAtOnce)
{
    this.scoreSubDocsAtOnce = scoreSubDocsAtOnce;
    this.drillDownQueries = drillDownQueries;
    this.drillSidewaysCollectors = drillSidewaysCollectors;
    this.drillDownCollector = drillDownCollector;
    this.baseQuery = baseQuery;
}
/// <summary>
/// Builds an IFullTextQuery for <typeparamref name="TEntity"/> by parsing
/// <paramref name="queryString"/> with a version-aware StandardAnalyzer and an
/// empty default field, inside a session-id logging scope.
/// </summary>
public IFullTextQuery CreateFullTextQuery <TEntity>(string queryString)
{
    using (new SessionIdLoggingContext(sessionImplementor.SessionId))
    {
        var analyzer = new StandardAnalyzer(Environment.LuceneVersion);
        var queryParser = new QueryParser(Environment.LuceneVersion, string.Empty, analyzer);
        Lucene.Net.Search.Query parsed = queryParser.Parse(queryString);
        return CreateFullTextQuery(parsed, typeof(TEntity));
    }
}
/// <summary>
/// Builds an IFullTextQuery for <typeparamref name="TEntity"/> by parsing
/// <paramref name="queryString"/> against <paramref name="defaultField"/>,
/// inside a session-id logging scope.
/// </summary>
public IFullTextQuery CreateFullTextQuery <TEntity>(string defaultField, string queryString)
{
    using (new SessionIdLoggingContext(sessionImplementor.SessionId))
    {
        // Use the version-aware parser/analyzer constructors for consistency
        // with the other CreateFullTextQuery overload (the version-less
        // constructors are obsolete compatibility shims).
        QueryParser queryParser = new QueryParser(Environment.LuceneVersion, defaultField, new StandardAnalyzer(Environment.LuceneVersion));
        Lucene.Net.Search.Query query = queryParser.Parse(queryString);
        return CreateFullTextQuery(query, typeof(TEntity));
    }
}
/// <summary>
/// Runs a full-text search over the member index ("SearchContent" field) and
/// maps each hit to a LuceneData row. Best-effort: any failure yields an empty
/// (or partial) list instead of throwing.
/// </summary>
public List <LuceneData> MemberSearch(string searchTerm)
{
    var searchData = new List <LuceneData>();
    try
    {
        Lucene.Net.Store.Directory dir = Lucene.Net.Store.FSDirectory.GetDirectory(_indexFileLocation);
        //create an analyzer to process the text
        Lucene.Net.Analysis.Analyzer analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer();
        //create the query parser, with the default search field set to "SearchContent"
        Lucene.Net.QueryParsers.QueryParser queryParser = new Lucene.Net.QueryParsers.QueryParser("SearchContent", analyzer);
        //parse the query string into a Query object
        Lucene.Net.Search.Query query = queryParser.Parse(searchTerm);

        // Close the searcher when done — the original leaked it.
        Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(dir, true);
        try
        {
            Lucene.Net.Search.Hits hits = searcher.Search(query);
            //iterate over the results.
            for (int i = 0; i < hits.Length(); i++)
            {
                Lucene.Net.Documents.Document doc = hits.Doc(i);
                searchData.Add(new LuceneData
                {
                    MemberID = Convert.ToInt32(doc.Get("MemberID")),
                    FirstName = doc.Get("FirstName"),
                    LastName = doc.Get("LastName"),
                    CompanyName = doc.Get("CompanyName"),
                    City = doc.Get("City"),
                    State = doc.Get("State"),
                    PostalCode = doc.Get("PostalCode")
                });
            }
        }
        finally
        {
            searcher.Close();
        }
    }
    catch (Exception)
    {
        // Deliberately swallowed: the search is best-effort and returns what it
        // has. Consider logging here instead of discarding the exception.
    }
    return searchData;
}
/// <summary>
/// Searches published articles of the current sub-domain for
/// <paramref name="strQuery"/> and returns page <paramref name="index"/>
/// (20 hits per page) as JSON: { Count, Data: [SearchArticle...] }.
/// Best-effort: any failure returns an empty string.
/// </summary>
public string SearchAndPaging(string strQuery, string index)
{
    string result = string.Empty;
    try
    {
        List <SearchArticle> searchArticleList = new List <SearchArticle>();
        PSCPortal.CMS.ArticleCollection ArticleList = ArticleCollection.GetArticleCollectionPublish();
        string nameSub = Libs.Ultility.GetSubDomain() == string.Empty ? "HomePage" : Libs.Ultility.GetSubDomain();
        SubDomain subDomain = PSCPortal.Engine.SubDomain.GetSubByName(nameSub);
        PageCollection pagesBelongTo = subDomain.GetPagesBelongTo();

        // Collect the ids of all published articles on the sub-domain's pages,
        // then join them once. (The original trimmed the " OR " separator after
        // EVERY page, corrupting the query whenever more than one page had hits.)
        List <string> articleIds = new List <string>();
        foreach (var page in pagesBelongTo)
        {
            foreach (var ar in ArticleList.Where(ar => ar.PageId == page.Id))
            {
                articleIds.Add(ar.Id.ToString());
            }
        }
        string strId = string.Join(" OR ", articleIds.ToArray());

        int pageIndex = Int32.Parse(index);
        string strSearch = " ArticleDetail:(" + strQuery + ") AND ArticleId:" + "( " + strId + " )";

        Lucene.Net.Index.IndexReader reader = Lucene.Net.Index.IndexReader.Open(Server.MapPath(System.Configuration.ConfigurationManager.AppSettings["IndexingArticle"]));
        try
        {
            Lucene.Net.Analysis.Standard.StandardAnalyzer analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer();
            Lucene.Net.QueryParsers.QueryParser parser = new Lucene.Net.QueryParsers.QueryParser("ArticleDetail", analyzer);
            Lucene.Net.Search.Query query = parser.Parse(strSearch);
            Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(reader);
            try
            {
                Lucene.Net.Search.Hits hits = searcher.Search(query);
                Lucene.Net.Highlight.QueryScorer score = new Lucene.Net.Highlight.QueryScorer(query);
                Lucene.Net.Highlight.SimpleHTMLFormatter formater = new Lucene.Net.Highlight.SimpleHTMLFormatter("<span class='Highlight'>", "</span>");
                Lucene.Net.Highlight.Highlighter highlighter = new Lucene.Net.Highlight.Highlighter(formater, score);

                // (removed: the original built a large HTML string here that was
                // unconditionally overwritten by the JSON serialization below.)

                // 20 hits per 1-based page.
                for (int i = pageIndex * 20 - 20; i < pageIndex * 20 && i < hits.Length(); i++)
                {
                    string detail = hits.Doc(i).Get("ArticleDetail");
                    Lucene.Net.Analysis.TokenStream ts = analyzer.TokenStream("ArticleDetail", new System.IO.StringReader(detail));
                    SearchArticle searchArticle = new SearchArticle();
                    searchArticle.Id = hits.Doc(i).Get("ArticleId");
                    searchArticle.Title = hits.Doc(i).Get("ArticleTitle");
                    searchArticle.Highligth = highlighter.GetBestFragment(ts, detail);
                    searchArticleList.Add(searchArticle);
                }

                JavaScriptSerializer serializer = new JavaScriptSerializer();
                Dictionary <string, object> resultDic = new Dictionary <string, object>();
                resultDic["Count"] = hits.Length();
                resultDic["Data"] = searchArticleList;
                result = serializer.Serialize(resultDic);
            }
            finally
            {
                searcher.Close();
            }
        }
        finally
        {
            // Close the reader even on failure (the original leaked it on exception).
            reader.Close();
        }
    }
    catch (Exception e)
    {
        // Swallowed by design: caller receives an empty string. Consider logging.
    }
    return result;
}
/// <summary> Create a CustomScoreQuery over input subQuery and a <see cref="ValueSourceQuery" />.</summary>
/// <param name="subQuery">the sub query whose score is being customized. Must not be null.
/// </param>
/// <param name="valSrcQueries">value source queries whose scores are used in the custom score
/// computation. For most simple/convenient use case these would be
/// <see cref="Lucene.Net.Search.Function.FieldScoreQuery">FieldScoreQueries</see>.
/// This parameter is optional - it can be null or even an empty array.
/// </param>
public CustomScoreQuery(Query subQuery, params ValueSourceQuery[] valSrcQueries)
{
    // Validate before assigning any field so a failed construction leaves
    // no partial state (the original assigned first, then threw).
    if (subQuery == null)
    {
        throw new System.ArgumentException("<subquery> must not be null!");
    }
    this.subQuery = subQuery;
    // null is normalized to an empty array so callers never see a null field.
    this.valSrcQueries = valSrcQueries ?? (new ValueSourceQuery[0]);
}
/*(non-Javadoc) @see Lucene.Net.Search.Query#rewrite(Lucene.Net.Index.IndexReader) */
// NOTE(review): unlike the cloning variant of Rewrite elsewhere in this code
// base, this version rewrites subQuery and every valSrcQueries entry IN PLACE
// and returns this same instance — confirm no caller depends on the
// pre-rewrite state of this query.
public override Query Rewrite(IndexReader reader)
{
    subQuery = subQuery.Rewrite(reader);
    for (int i = 0; i < valSrcQueries.Length; i++)
    {
        valSrcQueries[i] = (ValueSourceQuery) valSrcQueries[i].Rewrite(reader);
    }
    return (this);
}
/// <summary> Create a CustomScoreQuery over input subQuery and a {@link ValueSourceQuery}.</summary>
/// <param name="subQuery">the sub query whose score is being customized. Must not be null.
/// </param>
/// <param name="valSrcQueries">value source queries whose scores are used in the custom score
/// computation. For most simple/convenient use case these would be
/// {@link Lucene.Net.Search.Function.FieldScoreQuery FieldScoreQueries}.
/// This parameter is optional - it can be null or even an empty array.
/// </param>
public CustomScoreQuery(Query subQuery, ValueSourceQuery[] valSrcQueries) : base()
{
    // Validate before assigning any field so a failed construction leaves
    // no partial state (the original assigned first, then threw).
    if (subQuery == null)
    {
        throw new System.ArgumentException("<subquery> must not be null!");
    }
    this.subQuery = subQuery;
    // null is normalized to an empty array so callers never see a null field.
    this.valSrcQueries = valSrcQueries != null ? valSrcQueries : new ValueSourceQuery[0];
}
/// <summary> Extracts all term texts of a given Query into an array of WeightedTerms,
/// scaling each term's weight by its IDF so selected fragments can be scored
/// better or highlighted with graded intensity.
/// </summary>
/// <param name="query"> Query to extract term texts from
/// </param>
/// <param name="reader">used to compute IDF
/// </param>
/// <param name="fieldName">the field on which Inverse Document Frequency (IDF) calculations are based
/// </param>
/// <returns> an array of the terms used in a query, plus their weights.
/// </returns>
public static WeightedTerm[] GetIdfWeightedTerms(Query query, IndexReader reader, System.String fieldName)
{
    WeightedTerm[] weightedTerms = GetTerms(query, false, fieldName);
    int totalNumDocs = reader.NumDocs();
    foreach (WeightedTerm weightedTerm in weightedTerms)
    {
        try
        {
            int docFreq = reader.DocFreq(new Term(fieldName, weightedTerm.term));
            //IDF algorithm taken from DefaultSimilarity class
            float idf = (float) (System.Math.Log((float) totalNumDocs / (double) (docFreq + 1)) + 1.0);
            weightedTerm.weight *= idf;
        }
        catch (System.IO.IOException)
        {
            // best-effort: leave this term's weight unscaled if DocFreq fails
        }
    }
    return weightedTerms;
}
/// <summary>
/// Sanity-checks the query via QueryUtils, then logs the given score followed
/// by the searcher's explanation for that document.
/// </summary>
private void LogResult(System.String msg, IndexSearcher s, Query q, int doc, float score1)
{
    QueryUtils.Check(q, s);
    string scoreLine = msg + " " + score1;
    Log(scoreLine);
    Log("Explain by: " + q);
    Log(s.Explain(q, doc));
}
/// <summary> Overrides superclass to ignore matches and focus on non-matches
///
/// </summary>
/// <seealso cref="CheckHits.checkNoMatchExplanations">
/// </seealso>
public override void Qtest(Query q, int[] expDocNrs)
{
    // Verifies that documents NOT listed in expDocNrs produce non-matching
    // explanations for the query on FIELD.
    CheckHits.CheckNoMatchExplanations(q, FIELD, searcher, expDocNrs);
}
/// <summary> Create a CustomScoreQuery over input subQuery and a {@link ValueSourceQuery}.</summary>
/// <param name="subQuery">the sub query whose score is being customized. Must not be null.
/// </param>
/// <param name="valSrcQuery">a value source query whose scores are used in the custom score
/// computation. For most simple/convenient use case this would be a
/// {@link Lucene.Net.Search.Function.FieldScoreQuery FieldScoreQuery}.
/// This parameter is optional - it can be null or even an empty array.
/// </param>
// Wraps the single value-source query (when non-null) in a one-element array
// and delegates to the array-based constructor; null maps to an empty array.
public CustomScoreQuery(Query subQuery, ValueSourceQuery valSrcQuery):this(subQuery, valSrcQuery != null?new ValueSourceQuery[]{valSrcQuery}:new ValueSourceQuery[0])
{
}
/// <summary> Builds a scorer for highlighting, seeding it with the query's terms
/// weighted by IDF. </summary>
/// <param name="query">a Lucene query (ideally rewritten using query.rewrite
/// before being passed to this class and the searcher)
/// </param>
/// <param name="reader">used to compute IDF which can be used to a) score selected fragments better
/// b) use graded highlights eg set font color intensity
/// </param>
/// <param name="fieldName">the field on which Inverse Document Frequency (IDF) calculations are based
/// </param>
// Delegates to the WeightedTerm[]-based constructor using IDF-weighted terms.
public QueryScorer(Query query, IndexReader reader, System.String fieldName):this(QueryTermExtractor.GetIdfWeightedTerms(query, reader, fieldName))
{
}
/// <summary> Verifies that the four custom-score variants of a query (q2: neutral, q3: mul,
/// q4: add, q5: mul-add) have the same hit set as the plain query q1, and that each
/// document's score matches the expected formula (boosted / multiplied / added by the
/// per-document field score). The hashtables map doc id -> score for each query.</summary>
// verify results are as expected. private void VerifyResults(float boost, IndexSearcher s, System.Collections.Hashtable h1, System.Collections.Hashtable h2customNeutral, System.Collections.Hashtable h3CustomMul, System.Collections.Hashtable h4CustomAdd, System.Collections.Hashtable h5CustomMulAdd, Query q1, Query q2, Query q3, Query q4, Query q5) { // verify numbers of matches Log("#hits = " + h1.Count); Assert.AreEqual(h1.Count, h2customNeutral.Count, "queries should have same #hits"); Assert.AreEqual(h1.Count, h3CustomMul.Count, "queries should have same #hits"); Assert.AreEqual(h1.Count, h4CustomAdd.Count, "queries should have same #hits"); Assert.AreEqual(h1.Count, h5CustomMulAdd.Count, "queries should have same #hits"); // verify scores ratios for (System.Collections.IEnumerator it = h1.Keys.GetEnumerator(); it.MoveNext(); ) { System.Int32 x = (System.Int32) it.Current; int doc = x; Log("doc = " + doc); float fieldScore = ExpectedFieldScore(s.GetIndexReader().Document(doc).Get(ID_FIELD)); Log("fieldScore = " + fieldScore); Assert.IsTrue(fieldScore > 0, "fieldScore should not be 0"); float score1 = (float) ((System.Single) h1[x]); LogResult("score1=", s, q1, doc, score1); float score2 = (float) ((System.Single) h2customNeutral[x]); LogResult("score2=", s, q2, doc, score2); Assert.AreEqual(boost * score1, score2, TEST_SCORE_TOLERANCE_DELTA, "same score (just boosted) for neutral"); float score3 = (float) ((System.Single) h3CustomMul[x]); LogResult("score3=", s, q3, doc, score3); Assert.AreEqual(boost * fieldScore * score1, score3, TEST_SCORE_TOLERANCE_DELTA, "new score for custom mul"); float score4 = (float) ((System.Single) h4CustomAdd[x]); LogResult("score4=", s, q4, doc, score4); Assert.AreEqual(boost * (fieldScore + score1), score4, TEST_SCORE_TOLERANCE_DELTA, "new score for custom add"); float score5 = (float) ((System.Single) h5CustomMulAdd[x]); LogResult("score5=", s, q5, doc, score5); Assert.AreEqual(boost * fieldScore * (score1 + fieldScore), score5, 
TEST_SCORE_TOLERANCE_DELTA, "new score for custom mul add"); } }
// Constructor: forwards the sub query and single value source to the base CustomScoreQuery.
internal CustomAddQuery(Query q, ValueSourceQuery qValSrc):base(q, qValSrc) { }
/// <summary> Appends a parsed clause to the clause list, adjusting the occur flag of the
/// preceding clause when required by the conjunction operator, and translating the
/// conjunction/modifier pair into MUST / SHOULD / MUST_NOT for the new clause.</summary>
/// <param name="clauses">clauses accumulated so far (may be mutated in place)</param>
/// <param name="conj">conjunction introducing this clause (CONJ_AND / CONJ_OR / none)</param>
/// <param name="mods">modifier on this clause (MOD_REQ / MOD_NOT / none)</param>
/// <param name="q">the clause's query; may be null if the analyzer filtered the term away</param>
protected internal virtual void AddClause(System.Collections.IList clauses, int conj, int mods, Query q)
{
    // An introducing AND promotes the preceding clause to required,
    // unless that clause is already prohibited.
    if (clauses.Count > 0 && conj == CONJ_AND)
    {
        BooleanClause previous = (BooleanClause) clauses[clauses.Count - 1];
        if (!previous.IsProhibited())
            previous.SetOccur(BooleanClause.Occur.MUST);
    }

    // Under the AND default operator, an introducing OR demotes the preceding clause
    // to optional, unless it is prohibited (so "-a OR b" is preserved, but
    // "+a OR b" becomes "a OR b"). Without this, "a OR b" would parse as "+a OR b"
    // because the first term was parsed as required.
    if (clauses.Count > 0 && operator_Renamed == AND_OPERATOR && conj == CONJ_OR)
    {
        BooleanClause previous = (BooleanClause) clauses[clauses.Count - 1];
        if (!previous.IsProhibited())
            previous.SetOccur(BooleanClause.Occur.SHOULD);
    }

    // The analyzer may have filtered the term away entirely; nothing to add then.
    // (Note: the preceding-clause adjustments above still apply in that case.)
    if (q == null)
        return ;

    bool isProhibited = (mods == MOD_NOT);
    bool isRequired;
    if (operator_Renamed == OR_OPERATOR)
    {
        // REQUIRED when introduced by AND or '+'; PROHIBITED when introduced by
        // NOT or '-'; never both at once.
        isRequired = (mods == MOD_REQ) || (conj == CONJ_AND && !isProhibited);
    }
    else
    {
        // PROHIBITED when introduced by NOT or '-'; otherwise REQUIRED
        // unless introduced by OR.
        isRequired = !isProhibited && conj != CONJ_OR;
    }

    if (isRequired && !isProhibited)
        clauses.Add(NewBooleanClause(q, BooleanClause.Occur.MUST));
    else if (!isRequired && !isProhibited)
        clauses.Add(NewBooleanClause(q, BooleanClause.Occur.SHOULD));
    else if (!isRequired && isProhibited)
        clauses.Add(NewBooleanClause(q, BooleanClause.Occur.MUST_NOT));
    else
        throw new System.SystemException("Clause cannot be both required and prohibited");
}
// Convenience wrapper: asserts via Lucene's CheckHits helper that 'query' on this
// test's searcher/field matches exactly the doc ids in 'results'.
private void CheckHits(Query query, int[] results) { Lucene.Net.Search.CheckHits.CheckHits_Renamed_Method(query, field, searcher, results); }
// Records a buffered delete-by-query: maps the query to the absolute doc id limit
// (flushedDocCount + docID) up to which matching documents are deleted.
// Synchronized on this writer to match the other buffered-delete methods.
private void AddDeleteQuery(Query query, int docID) { lock (this) { deletesInRAM.queries[query] = flushedDocCount + docID; } }
// Buffers the given delete-by-query entries against the documents currently in RAM.
// Returns true when the accumulated deletes have grown enough to warrant a flush.
internal bool BufferDeleteQueries(Query[] queries)
{
    lock (this)
    {
        WaitReady(null);
        foreach (Query q in queries)
        {
            AddDeleteQuery(q, numDocsInRAM);
        }
        return TimeToFlushDeletes();
    }
}
/// <summary> Deletes the document(s) matching any of the provided queries.
/// All deletes are flushed at the same time.
///
/// <p/><b>NOTE</b>: if this method hits an OutOfMemoryError
/// you should immediately close the writer. See <a
/// href="#OOME">above</a> for details.<p/>
/// </summary>
/// <param name="queries">array of queries to identify the documents to be deleted</param>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public virtual void DeleteDocuments(Query[] queries)
{
    EnsureOpen();
    // Flush immediately when buffering these deletes pushed us over the threshold.
    if (docWriter.BufferDeleteQueries(queries))
        Flush(true, false, false);
}
// Asserts that the parallel reader and the single reader return identical results
// (same hit count, matching scores within tolerance, identical stored fields) for 'query'.
private void QueryTest(Query query)
{
    ScoreDoc[] hitsParallel = parallel.Search(query, null, 1000).scoreDocs;
    ScoreDoc[] hitsSingle = single.Search(query, null, 1000).scoreDocs;
    Assert.AreEqual(hitsParallel.Length, hitsSingle.Length);
    for (int i = 0; i < hitsParallel.Length; i++)
    {
        Assert.AreEqual(hitsParallel[i].score, hitsSingle[i].score, 0.001f);
        Document fromParallel = parallel.Doc(hitsParallel[i].doc);
        Document fromSingle = single.Doc(hitsSingle[i].doc);
        foreach (System.String fieldName in new System.String[]{"f1", "f2", "f3", "f4"})
        {
            Assert.AreEqual(fromParallel.Get(fieldName), fromSingle.Get(fieldName));
        }
    }
}
/// <summary> Constructs a scorer over all terms extracted from the query
/// (unweighted by IDF; see the IndexReader overload for IDF weighting).</summary>
/// <param name="query">a Lucene query (ideally rewritten using query.rewrite
/// before being passed to this class and the searcher)</param>
public QueryScorer(Query query):this(QueryTermExtractor.GetTerms(query)) { }
// Records a buffered delete-by-query: maps the query to the absolute doc id limit
// (flushedDocCount + docID), and accounts for the per-entry RAM cost so the
// flush-deletes trigger sees the growth. Synchronized on this writer.
private void AddDeleteQuery(Query query, int docID) { lock (this) { deletesInRAM.queries[query] = (System.Int32) (flushedDocCount + docID); deletesInRAM.AddBytesUsed(BYTES_PER_DEL_QUERY); } }
// Buffers a single delete-by-query against the documents currently in RAM.
// Returns true when the accumulated deletes should be flushed.
internal bool BufferDeleteQuery(Query query) { lock (this) { WaitReady(null); AddDeleteQuery(query, numDocsInRAM); return TimeToFlushDeletes(); } }
// Legacy ArrayList overload; the explicit cast routes the call to the
// non-generic IList overload that contains the actual clause logic.
protected internal virtual void AddClause(System.Collections.ArrayList clauses, int conj, int mods, Query q) { AddClause((System.Collections.IList) clauses, conj, mods, q); }
/// <summary> Generic-list overload; forwards to the non-generic IList overload that
/// contains the actual clause logic.</summary>
protected internal virtual void AddClause(List<BooleanClause> clauses, int conj, int mods, Query q)
{
    // The cast is essential: without it overload resolution binds the call to this
    // very method (identity conversion beats the conversion to IList), producing
    // infinite recursion and a StackOverflowException at runtime.
    AddClause((System.Collections.IList) clauses, conj, mods, q);
}
/// <summary> Builds a new BooleanClause instance.</summary>
/// <param name="q">sub query</param>
/// <param name="occur">how this clause should occur when matching documents</param>
/// <returns> new BooleanClause instance</returns>
protected internal virtual BooleanClause NewBooleanClause(Query q, BooleanClause.Occur occur)
{
    BooleanClause clause = new BooleanClause(q, occur);
    return clause;
}
public virtual void TestGetRangeFragments()
{
    System.String rangeExpression = FIELD_NAME + ":[kannedy TO kznnedy]";

    // Must explicitly configure the QueryParser to produce a RangeQuery rather
    // than a RangeFilter, otherwise there are no terms for the highlighter to score.
    QueryParser parser = new QueryParser(FIELD_NAME, new StandardAnalyzer());
    parser.SetUseOldRangeQuery(true);
    query = parser.Parse(rangeExpression);

    DoSearching(query);
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Constructor: remembers the query being wrapped.
public DumbQueryWrapper(Query q)
{
    this.q = q;
}
/// <summary> Verifies highlighting across a MultiSearcher: builds two single-document RAM
/// indexes ("multiOne", "multiTwo"), searches both with the prefix query "multi*",
/// then rewrites the query against each reader and combines the rewritten forms so
/// the QueryScorer sees concrete terms. Expects exactly 2 highlights.</summary>
public virtual void TestMultiSearcher() { //setup index 1 RAMDirectory ramDir1 = new RAMDirectory(); IndexWriter writer1 = new IndexWriter(ramDir1, new StandardAnalyzer(), true); Document d = new Document(); Field f = new Field(FIELD_NAME, "multiOne", Field.Store.YES, Field.Index.TOKENIZED); d.Add(f); writer1.AddDocument(d); writer1.Optimize(); writer1.Close(); IndexReader reader1 = IndexReader.Open(ramDir1); //setup index 2 RAMDirectory ramDir2 = new RAMDirectory(); IndexWriter writer2 = new IndexWriter(ramDir2, new StandardAnalyzer(), true); d = new Document(); f = new Field(FIELD_NAME, "multiTwo", Field.Store.YES, Field.Index.TOKENIZED); d.Add(f); writer2.AddDocument(d); writer2.Optimize(); writer2.Close(); IndexReader reader2 = IndexReader.Open(ramDir2); IndexSearcher[] searchers = new IndexSearcher[2]; searchers[0] = new IndexSearcher(ramDir1); searchers[1] = new IndexSearcher(ramDir2); MultiSearcher multiSearcher = new MultiSearcher(searchers); QueryParser parser = new QueryParser(FIELD_NAME, new StandardAnalyzer()); parser.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE); query = parser.Parse("multi*"); System.Console.Out.WriteLine("Searching for: " + query.ToString(FIELD_NAME)); //at this point the multisearcher calls combine(query[]) hits = multiSearcher.Search(query); //query = QueryParser.parse("multi*", FIELD_NAME, new StandardAnalyzer()); Query[] expandedQueries = new Query[2]; expandedQueries[0] = query.Rewrite(reader1); expandedQueries[1] = query.Rewrite(reader2); query = query.Combine(expandedQueries); //create an instance of the highlighter with the tags used to surround highlighted text Highlighter highlighter = new Highlighter(this, new QueryScorer(query)); for (int i = 0; i < hits.Length(); i++) { System.String text = hits.Doc(i).Get(FIELD_NAME); TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text)); System.String highlightedText = highlighter.GetBestFragment(tokenStream, text); 
System.Console.Out.WriteLine(highlightedText); } Assert.IsTrue(numHighlights == 2, "Failed to find correct number of highlights " + numHighlights + " found"); }
// Constructor: forwards the sub query and both value sources (as an array) to the base CustomScoreQuery.
internal CustomMulAddQuery(Query q, ValueSourceQuery qValSrc1, ValueSourceQuery qValSrc2):base(q, new ValueSourceQuery[]{qValSrc1, qValSrc2}) { }
/// <summary> Verifies best-fragment highlighting over overlapping tokens for six queries,
/// run twice: once with GetTS2() and once with GetTS2a() (which emits the bigger
/// overlapping token first). Expected fragments are identical across both passes.</summary>
public virtual void TestOverlapAnalyzer2()
{
    System.String s = "Hi-Speed10 foo";
    System.String[] queryStrings = new System.String[]
    {
        "foo", "10", "hi", "speed", "hispeed", "hi speed"
    };
    System.String[] expectedFragments = new System.String[]
    {
        "Hi-Speed10 <B>foo</B>",
        "Hi-Speed<B>10</B> foo",
        "<B>Hi</B>-Speed10 foo",
        "Hi-<B>Speed</B>10 foo",
        "<B>Hi-Speed</B>10 foo",
        "<B>Hi-Speed</B>10 foo"
    };

    // Pass 0 uses GetTS2(); pass 1 repeats the same checks with GetTS2a(),
    // where the bigger overlapping token comes first in the stream.
    for (int pass = 0; pass < 2; pass++)
    {
        for (int i = 0; i < queryStrings.Length; i++)
        {
            Query parsed = new QueryParser("text", new WhitespaceAnalyzer()).Parse(queryStrings[i]);
            Highlighter h = new Highlighter(new QueryScorer(parsed));
            System.String fragment = h.GetBestFragments(pass == 0 ? GetTS2() : GetTS2a(), s, 3, "...");
            Assert.AreEqual(fragment, expectedFragments[i]);
        }
    }
}
// When true, the valueSource part of the query does not take part in weights normalization.
private bool strict = false;

/// <summary> Create a CustomScoreQuery over input subQuery, with no value source queries.</summary>
/// <param name="subQuery">the sub query whose score is being customized. Must not be null.</param>
public CustomScoreQuery(Query subQuery)
    : this(subQuery, new ValueSourceQuery[0])
{
}
// Parses the query string (with scoring boolean rewrite enabled so multi-term
// queries keep their terms for highlighting) and delegates to DoSearching(Query).
public virtual void DoSearching(System.String queryString)
{
    QueryParser queryParser = new QueryParser(FIELD_NAME, new StandardAnalyzer());
    queryParser.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
    query = queryParser.Parse(queryString);
    DoSearching(query);
}
/// <summary> Constructs a scorer over the query's terms for a single field
/// (terms from other fields are ignored).</summary>
/// <param name="query">a Lucene query (ideally rewritten using query.rewrite
/// before being passed to this class and the searcher)</param>
/// <param name="fieldName">the Field name which is used to match Query terms</param>
public QueryScorer(Query query, System.String fieldName):this(QueryTermExtractor.GetTerms(query, false, fieldName)) { }
// Rewrites the query against the reader and runs the search, storing
// searcher/query/hits in the test fixture's fields.
public virtual void DoSearching(Query unReWrittenQuery)
{
    searcher = new IndexSearcher(ramDir);
    // For any multi-term queries to work (prefix, wildcard, range, fuzzy etc)
    // you must use a rewritten query!
    query = unReWrittenQuery.Rewrite(reader);
    System.String display = "Searching for: " + query.ToString(FIELD_NAME);
    System.Console.Out.WriteLine(display);
    hits = searcher.Search(query);
}