/// <summary>
/// Builds the weight for a span query: caches the similarity, the query and
/// its terms, and precomputes the idf over those terms.
/// </summary>
/// <param name="query">the span query being weighted</param>
/// <param name="searcher">searcher supplying similarity and term statistics</param>
public SpanWeight(SpanQuery query, Searcher searcher)
{
    // Fetch the similarity once and reuse it; the original called
    // query.GetSimilarity(searcher) a second time for the idf computation.
    this.similarity = query.GetSimilarity(searcher);
    this.query = query;
    this.terms = query.GetTerms();
    idf = this.similarity.Idf(terms, searcher);
}
/// <summary>
/// Builds the weight for a span query: caches the similarity, extracts the
/// query's terms into a hashtable, and precomputes the idf over those terms.
/// </summary>
/// <param name="query">the span query being weighted</param>
/// <param name="searcher">searcher supplying similarity and term statistics</param>
public SpanWeight(SpanQuery query, Searcher searcher)
{
    // Fetch the similarity once and reuse it; the original called
    // query.GetSimilarity(searcher) a second time for the idf computation.
    this.similarity = query.GetSimilarity(searcher);
    this.query = query;
    terms = new System.Collections.Hashtable();
    query.ExtractTerms(terms);
    // Idf expects a flat collection, so snapshot the hashtable's values.
    System.Collections.ArrayList allTerms = new System.Collections.ArrayList(terms.Values);
    idf = this.similarity.Idf(allTerms, searcher);
}
/// <summary>
/// Wires up the weight for a custom-score query: the sub-query's weight plus
/// one weight per value-source query, and copies the strict flag.
/// </summary>
public CustomWeight(CustomScoreQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
    this.subQueryWeight = Enclosing_Instance.subQuery.Weight(searcher);
    int sourceCount = Enclosing_Instance.valSrcQueries.Length;
    this.valSrcWeights = new Weight[sourceCount];
    for (int idx = 0; idx < sourceCount; idx++)
    {
        this.valSrcWeights[idx] = Enclosing_Instance.valSrcQueries[idx].CreateWeight(searcher);
    }
    this.qStrict = Enclosing_Instance.strict;
}
/// <summary>
/// Runs a query against the shared index, lazily creating the IndexSearcher.
/// The lock serializes searches against index writes.
/// </summary>
public static Lucene.Net.Search.Hits search(Lucene.Net.Search.Query query)
{
    lock (my_lock)
    {
        if (searcher == null)
        {
            searcher = new Lucene.Net.Search.IndexSearcher(MyLucene.index_path);
        }
        return searcher.Search(query);
    }
}
/// <summary>
/// Parses the query string against the "body" field, runs it through a
/// collector, and prints the hit count and elapsed milliseconds.
/// </summary>
public static void searchFor(Searcher searcher, string querystr)
{
    // NOTE: the parser could be hoisted out of this method and reused.
    QueryParser parser = new QueryParser("body", new StandardAnalyzer());
    Query query = parser.Parse(querystr);
    AnonymousClassCollector hits = new AnonymousClassCollector();
    Stopwatch timer = Stopwatch.StartNew(); // high-resolution timer
    searcher.Search(query, hits);
    timer.Stop();
    Console.WriteLine("search for [{0}] returned {1} hits in {2}ms )", query, hits.Count, timer.ElapsedMilliseconds);
}
/// <summary>
/// Removes the document with the given id from the Lucene index. Closes any
/// open searcher first (it would hold the index open), then deletes by the
/// "id" term. Failures are logged and swallowed; the modifier is always
/// flushed and closed.
/// </summary>
/// <param name="id">entity id; method is a no-op when null</param>
public void DeleteIndex(MongoObjectId id)
{
    // Guard against null, matching UpdateIndex's behavior (id.ToString()
    // below would otherwise throw).
    if (id == null)
    {
        return;
    }
    Lucene.Net.Index.IndexModifier modifier = null;
    try
    {
        if (searcher != null)
        {
            try
            {
                searcher.Close();
            }
            catch (Exception e)
            {
                logger.Error("Exception closing lucene searcher:" + e.Message, e);
                throw;
            }
            searcher = null;
        }
        modifier = new Lucene.Net.Index.IndexModifier(CustomAppSettings.SearchIndexFolder, analyzer, false);
        // same as build, but uses "modifier" instead of write.
        // uses additional "where" clause for bugid
        modifier.DeleteDocuments(new Lucene.Net.Index.Term("id", id.ToString()));
    }
    catch (Exception e)
    {
        // BUGFIX: the original logged a copy-pasted "Exception closing lucene
        // searcher" message here, which misattributed delete failures.
        logger.Error("Exception deleting document from lucene index:" + e.Message, e);
    }
    finally
    {
        // Flush/Close can themselves throw; don't let that escape the cleanup
        // path (mirrors UpdateIndex's finally block).
        try
        {
            if (modifier != null)
            {
                modifier.Flush();
                modifier.Close();
            }
        }
        catch (Exception e)
        {
            logger.Error("Exception closing lucene index modifier:" + e.Message, e);
        }
    }
}
/// <summary>
/// Builds a 500-document, single-segment index over a seek-counting Directory
/// and opens this.searcher over it. Every (numDocs / numHits)-th document
/// contains both term1 and term2; every 15th contains only term1; the rest
/// contain term2 without term1.
/// </summary>
private void CreateIndex(int numHits)
{
    const int numDocs = 500;
    Directory directory = new SeekCountingDirectory(this);
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.SetUseCompoundFile(false);
    writer.SetMaxBufferedDocs(10);
    int matchInterval = numDocs / numHits;
    for (int docNum = 0; docNum < numDocs; docNum++)
    {
        System.String content;
        if (docNum % matchInterval == 0)
        {
            content = this.term1 + " " + this.term2; // matches the query "term1 term2"
        }
        else if (docNum % 15 == 0)
        {
            content = this.term1 + " " + this.term1; // contains only term1
        }
        else
        {
            content = this.term3 + " " + this.term2; // term2 but not term1
        }
        Document doc = new Document();
        doc.Add(new Field(this.field, content, Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Optimize(); // collapse the index to a single segment
    writer.Close();
    SegmentReader reader = SegmentReader.GetOnlySegmentReader(directory);
    this.searcher = new IndexSearcher(reader);
}
/// <summary>
/// Opens a searcher over the RAM directory and runs the rewritten form of the
/// given query. Multi-term queries (prefix, wildcard, range, fuzzy, ...) must
/// be rewritten for downstream use to work.
/// </summary>
public virtual void DoSearching(Query unReWrittenQuery)
{
    searcher = new IndexSearcher(ramDir);
    // Rewrite first so primitive terms are available.
    query = unReWrittenQuery.Rewrite(reader);
    System.Console.Out.WriteLine("Searching for: " + query.ToString(FIELD_NAME));
    hits = searcher.Search(query);
}
// Builds the stateful weight used to score this custom query.
public override Weight CreateWeight(Searcher searcher, IState state)
{
    return new CustomWeight(this, searcher, state);
}
/// <summary>
/// Streaming-search demo: every matching document is fed to a collector that
/// prints its docId and score, regardless of relevance.
/// </summary>
public static void DoStreamingSearch(Searcher searcher, Query query)
{
    Collector printingCollector = new AnonymousClassCollector();
    searcher.Search(query, printingCollector);
}
// Weight construction is delegated entirely to the wrapped (masked) query.
public override Weight CreateWeight(Searcher searcher)
{
    return maskedQuery.CreateWeight(searcher);
}
// Builds the payload-aware term weight for this boosting term query.
public override Weight CreateWeight(Searcher searcher)
{
    return new BoostingTermWeight(this, this, searcher);
}
/// <summary>
/// Full-text search over the Lucene index. Stems the query with
/// LithuanianStemmer, appends a trailing wildcard when the stemmed text ends
/// in a letter, runs the query (lazily creating the IndexSearcher), and
/// returns up to 100 hits de-duplicated by document id, each with an
/// HTML-highlighted fragment. User-facing messages are in Lithuanian.
/// </summary>
/// <param name="searchText">raw query text typed by the user</param>
/// <returns>a SearchModel carrying the result list, or an error/empty message</returns>
public SearchModel Search(string searchText) { var result = new SearchModel(); if (string.IsNullOrEmpty(searchText)) { result.Message = "Įveskite paieškos užklausą."; return(result); } var stemmedSearchText = new LithuanianStemmer().Stem(searchText.Trim()); if (string.IsNullOrEmpty(stemmedSearchText)) { result.Message = "Įveskite paieškos užklausą."; return(result); } Lucene.Net.Search.Hits hits = null; try { if (char.IsLetter(stemmedSearchText[stemmedSearchText.Length - 1])) { stemmedSearchText += "*"; } query = parser.Parse(stemmedSearchText); if (searcher == null) { searcher = new Lucene.Net.Search.IndexSearcher(CustomAppSettings.SearchIndexFolder); } hits = searcher.Search(query); } catch (Exception e) { result.Message = "Paieška nepavyko. Pataisykite užklausą. Klaidos pranešimas: " + e.Message; return(result); } Lucene.Net.Highlight.Formatter formatter = new Lucene.Net.Highlight.SimpleHTMLFormatter( "<span class=\"highlightResult\">", "</span>"); var fragmenter = new Lucene.Net.Highlight.SimpleFragmenter(100); var scorer = new Lucene.Net.Highlight.QueryScorer(searcher.Rewrite(query)); var highlighter = new Lucene.Net.Highlight.Highlighter(formatter, scorer); highlighter.SetTextFragmenter(fragmenter); Dictionary <string, int> dict_already_seen_ids = new Dictionary <string, int>(); var list = new List <SearchIndexModel>(); // insert the search results into a temp table which we will join with what's in the database for (int i = 0; i < hits.Length(); i++) { if (dict_already_seen_ids.Count < 100) { Lucene.Net.Documents.Document doc = hits.Doc(i); string id = doc.Get("id"); if (!dict_already_seen_ids.ContainsKey(id)) { dict_already_seen_ids[id] = 1; var model = new SearchIndexModel(); model.Id = id; model.Score = hits.Score(i); model.Subject = doc.Get("subject"); model.Type = (EntryTypes)Enum.Parse(typeof(EntryTypes), doc.Get("type")); string raw_text = HttpUtility.HtmlEncode(doc.Get("raw_text")); //string raw_text = doc.Get("raw_text"); 
Lucene.Net.Analysis.TokenStream stream = analyzer.TokenStream("text", new System.IO.StringReader( raw_text)); string highlighted_text = highlighter.GetBestFragments(stream, raw_text, 3, "...").Replace("'", "''"); if (highlighted_text == "") // someties the highlighter fails to emit text... { highlighted_text = raw_text.Replace("'", "''"); } if (highlighted_text.Length > 3000) { highlighted_text = highlighted_text.Substring(0, 3000); } model.HighlightedText = highlighted_text; list.Add(model); } } else { break; } } result.List = list; result.SearchPhrase = searchText; if (list.Count == 0) { result.Message = string.Format("Įrašų pagal užklausą '{0}' nerasta. Patikslinkite paieškos duomenis.", searchText); } return(result); }
// Builds the payload-aware span weight for this near-span query.
public override Weight CreateWeight(Searcher searcher)
{
    return new PayloadNearSpanWeight(this, this, searcher);
}
// Constructs the span weight via the base class, then binds the enclosing
// query instance for inner-class access.
public PayloadNearSpanWeight(PayloadNearQuery enclosingInstance, SpanQuery query, Searcher searcher)
    : base(query, searcher)
{
    InitBlock(enclosingInstance);
}
// Creates this query's weight; the query doubles as its own enclosing instance.
public override Weight CreateWeight(Searcher searcher)
{
    return new BoostingTermWeight(this, this, searcher);
}
// Span queries are scored through a SpanWeight built over this query.
protected internal override Weight CreateWeight(Searcher searcher)
{
    return new SpanWeight(this, searcher);
}
// idf used for phrase queries: a neutral constant so document frequency
// never influences the score.
public override float Idf(System.Collections.ICollection terms, Searcher searcher)
{
    return 1.0f;
}
// Similarity lookup is forwarded to the wrapped (masked) query.
public override Similarity GetSimilarity(Searcher searcher)
{
    return maskedQuery.GetSimilarity(searcher);
}
// Test-only hook exposing SpanWeight construction to NUnit.
public Weight CreateWeight_ForNUnitTest(Searcher searcher)
{
    return new SpanWeight(this, searcher);
}
// Constant idf: term rarity has no effect on scoring.
public override float Idf(System.Collections.Generic.IList<Term> terms, Searcher searcher)
{
    return 1f;
}
/// <summary>Per-test setup: runs the base setup first, then initializes the
/// 'single' and 'parallel' fixtures via their factory methods.</summary>
public override void SetUp() { base.SetUp(); single = Single(); parallel = Parallel(); }
// idf used for phrase queries: a stubbed explanation that always reports a
// constant 1.0 idf with a fixed description string.
public override Explanation.IDFExplanation IdfExplain(ICollection<Term> terms, Searcher searcher)
{
    var stub = new InjectableIDFExplanation
    {
        ExplainFunc = () => "Inexplicable",
        GetIdfFunc = () => 1.0f
    };
    return stub;
}
/// <summary>
/// Re-indexes a single entity: closes any open searcher, deletes the entity's
/// existing Lucene document by "id", then re-adds a document built from the
/// current NoSQL record for the given entry type (Idea, Issue, User,
/// Organization or Problem). Exceptions are logged and swallowed; the
/// modifier is flushed and closed in the finally block.
/// </summary>
/// <param name="id">entity id; method is a no-op when null</param>
/// <param name="type">which collection to reload the entity from</param>
public void UpdateIndex(MongoObjectId id, EntryTypes type) { if (id == null) { return; } Lucene.Net.Index.IndexModifier modifier = null; try { if (searcher != null) { try { searcher.Close(); } catch (Exception e) { logger.Error("Exception closing lucene searcher:" + e.Message, e); throw; } searcher = null; } modifier = new Lucene.Net.Index.IndexModifier(CustomAppSettings.SearchIndexFolder, analyzer, false); // same as build, but uses "modifier" instead of write. // uses additional "where" clause for bugid modifier.DeleteDocuments(new Lucene.Net.Index.Term("id", id.ToString())); using (var noSqlSession = noSqlSessionFactory()) { switch (type) { case EntryTypes.Idea: var idea = noSqlSession.GetById <Idea>(id); if (idea != null) { modifier.AddDocument(CreateDoc(id, CreateSearchText(idea), idea.Subject, type)); } break; case EntryTypes.Issue: var issue = noSqlSession.GetById <Issue>(id); if (issue != null) { modifier.AddDocument(CreateDoc(id, CreateSearchText(issue), issue.Subject, type)); } break; case EntryTypes.User: var user = noSqlSession.GetById <User>(id); if (user != null) { modifier.AddDocument(CreateDoc(id, user.FullName, user.FullName, type)); } break; case EntryTypes.Organization: var org = noSqlSession.GetById <Organization>(id); if (org != null) { modifier.AddDocument(CreateDoc(id, org.Name, org.Name, type)); } break; case EntryTypes.Problem: var prob = noSqlSession.GetById <Problem>(id); if (prob != null) { modifier.AddDocument(CreateDoc(id, CreateSearchText(prob), prob.Text.LimitLength(100), type)); } break; default: break; } } } catch (Exception e) { logger.Error("exception updating Lucene index: " + e.Message, e); } finally { try { if (modifier != null) { modifier.Flush(); modifier.Close(); } } catch (Exception e) { logger.Error("exception updating Lucene index: " + e.Message, e); } } }
// Stateful weight construction is delegated to the wrapped (masked) query.
public override Weight CreateWeight(Searcher searcher, IState state)
{
    return maskedQuery.CreateWeight(searcher, state);
}
// Neutral idf: every term set scores as if equally common.
public override float Idf(System.Collections.ICollection terms, Searcher searcher)
{
    return 1.0f;
}
// Forwards construction to the base weight, then binds the enclosing query
// instance for inner-class access.
public BoostingTermWeight(BoostingTermQuery enclosingInstance, BoostingTermQuery query, Searcher searcher)
    : base(enclosingInstance, query, searcher)
{
    InitBlock(enclosingInstance);
}
/// <summary>
/// Thread-safe search over the shared index; the IndexSearcher is created on
/// first use, and the lock prevents contention with index writers.
/// </summary>
public static Lucene.Net.Search.Hits search(Lucene.Net.Search.Query query)
{
    Lucene.Net.Search.Hits hits;
    lock (my_lock)
    {
        if (searcher == null)
        {
            searcher = new Lucene.Net.Search.IndexSearcher(MyLucene.index_path);
        }
        hits = searcher.Search(query);
    }
    return hits;
}
/// <summary>
/// Worker-thread entry point that refreshes the Lucene index for one bug:
/// closes the shared searcher, deletes the bug's existing documents by
/// "bg_id", then re-indexes the bug's short description, tags, custom text
/// columns, and non-hidden posts. Serialized via my_lock; exceptions are
/// logged and swallowed. The bug id is an int, so the SQL string substitution
/// is not user-controlled text.
/// </summary>
/// <param name="obj">boxed int bug id</param>
// update an existing index static void threadproc_update(object obj) { // just to be safe, make the worker threads wait for each other //System.Console.Beep(540, 20); lock (my_lock) // prevent contention between searching and writing? { //System.Console.Beep(840, 20); try { if (searcher != null) { try { searcher.Close(); } catch (Exception e) { btnet.Util.write_to_log("Exception closing lucene searcher:" + e.Message); btnet.Util.write_to_log(e.StackTrace); } searcher = null; } Lucene.Net.Index.IndexModifier modifier = new Lucene.Net.Index.IndexModifier(index_path, anal, false); // same as buid, but uses "modifier" instead of write. // uses additional "where" clause for bugid int bug_id = (int)obj; btnet.Util.write_to_log("started updating Lucene index using folder " + MyLucene.index_path); modifier.DeleteDocuments(new Lucene.Net.Index.Term("bg_id", Convert.ToString(bug_id))); string sql = @" select bg_id, $custom_cols isnull(bg_tags,'') bg_tags, bg_short_desc from bugs where bg_id = $bugid"; sql = sql.Replace("$bugid",Convert.ToString(bug_id)); DataSet ds_text_custom_cols = get_text_custom_cols(); sql = sql.Replace("$custom_cols", get_text_custom_cols_names(ds_text_custom_cols)); // index the bugs DataRow dr = btnet.DbUtil.get_datarow(sql); modifier.AddDocument(MyLucene.create_doc( (int)dr["bg_id"], 0, "desc", (string)dr["bg_short_desc"])); // tags string tags = (string) dr["bg_tags"]; if (tags != "") { modifier.AddDocument(MyLucene.create_doc( (int)dr["bg_id"], 0, "tags", tags)); } // custom text fields foreach (DataRow dr_custom_col in ds_text_custom_cols.Tables[0].Rows) { string name = (string) dr_custom_col["name"]; string val = Convert.ToString(dr[name]); if (val != "") { modifier.AddDocument(MyLucene.create_doc( (int)dr["bg_id"], 0, name.Replace("'","''"), val)); } } // index the bug posts DataSet ds = btnet.DbUtil.get_dataset(@" select bp_bug, bp_id, isnull(bp_comment_search,bp_comment) [text] from bug_posts where bp_type <> 'update' and 
bp_hidden_from_external_users = 0 and bp_bug = " + Convert.ToString(bug_id)); foreach (DataRow dr2 in ds.Tables[0].Rows) { modifier.AddDocument(MyLucene.create_doc( (int)dr2["bp_bug"], (int)dr2["bp_id"], "post", (string)dr2["text"])); } modifier.Flush(); modifier.Close(); btnet.Util.write_to_log("done updating Lucene index"); } catch (Exception e) { btnet.Util.write_to_log("exception updating Lucene index: " + e.Message); btnet.Util.write_to_log(e.StackTrace); } } }
// Builds the base span weight, then binds the enclosing query instance.
public PayloadTermWeight(PayloadTermQuery enclosingInstance, PayloadTermQuery query, Searcher searcher)
    : base(query, searcher)
{
    InitBlock(enclosingInstance);
}
/// <summary> This demonstrates a typical paging search scenario, where the search engine presents /// pages of size n to the user. The user can then go to the next page if interested in /// the next hits. /// /// When the query is executed for the first time, then only enough results are collected /// to fill 5 result pages. If the user wants to page beyond this limit, then the query /// is executed another time and all hits are collected. /// /// </summary>
/// <param name="in_Renamed">console input stream used to read paging commands</param>
/// <param name="searcher">searcher the query is executed against</param>
/// <param name="query">the query to run</param>
/// <param name="hitsPerPage">page size n</param>
/// <param name="raw">when true, print raw doc ids and scores instead of stored fields</param>
/// <param name="interactive">when false, print only the first page and return</param>
public static void DoPagingSearch(System.IO.StreamReader in_Renamed, Searcher searcher, Query query, int hitsPerPage, bool raw, bool interactive) { // Collect enough docs to show 5 pages TopScoreDocCollector collector = TopScoreDocCollector.create(5 * hitsPerPage, false); searcher.Search(query, collector); ScoreDoc[] hits = collector.TopDocs().scoreDocs; int numTotalHits = collector.GetTotalHits(); System.Console.Out.WriteLine(numTotalHits + " total matching documents"); int start = 0; int end = System.Math.Min(numTotalHits, hitsPerPage); while (true) { if (end > hits.Length) { System.Console.Out.WriteLine("Only results 1 - " + hits.Length + " of " + numTotalHits + " total matching documents collected."); System.Console.Out.WriteLine("Collect more (y/n) ?"); System.String line = in_Renamed.ReadLine(); if (line.Length == 0 || line[0] == 'n') { break; } collector = TopScoreDocCollector.create(numTotalHits, false); searcher.Search(query, collector); hits = collector.TopDocs().scoreDocs; } end = System.Math.Min(hits.Length, start + hitsPerPage); for (int i = start; i < end; i++) { if (raw) { // output raw format System.Console.Out.WriteLine("doc=" + hits[i].doc + " score=" + hits[i].score); continue; } Document doc = searcher.Doc(hits[i].doc); System.String path = doc.Get("path"); if (path != null) { System.Console.Out.WriteLine((i + 1) + ". 
" + path); System.String title = doc.Get("title"); if (title != null) { System.Console.Out.WriteLine(" Title: " + doc.Get("title")); } } else { System.Console.Out.WriteLine((i + 1) + ". " + "No path for this document"); } } if (!interactive) { break; } if (numTotalHits >= end) { bool quit = false; while (true) { System.Console.Out.Write("Press "); if (start - hitsPerPage >= 0) { System.Console.Out.Write("(p)revious page, "); } if (start + hitsPerPage < numTotalHits) { System.Console.Out.Write("(n)ext page, "); } System.Console.Out.WriteLine("(q)uit or enter number to jump to a page."); System.String line = in_Renamed.ReadLine(); if (line.Length == 0 || line[0] == 'q') { quit = true; break; } if (line[0] == 'p') { start = System.Math.Max(0, start - hitsPerPage); break; } else if (line[0] == 'n') { if (start + hitsPerPage < numTotalHits) { start += hitsPerPage; } break; } else { int page = System.Int32.Parse(line); if ((page - 1) * hitsPerPage < numTotalHits) { start = (page - 1) * hitsPerPage; break; } else { System.Console.Out.WriteLine("No such page"); } } } if (quit) { break; } end = System.Math.Min(numTotalHits, start + hitsPerPage); } } }
// Delegates to the base weight constructor and then wires the enclosing
// query instance.
public BoostingTermWeight(BoostingTermQuery enclosingInstance, BoostingTermQuery query, Searcher searcher)
    : base(enclosingInstance, query, searcher)
{
    InitBlock(enclosingInstance);
}
// Builds the custom weight used to score this query.
public override Weight CreateWeight(Searcher searcher)
{
    return new CustomWeight(this, searcher);
}
// Always hands back the injected similarity implementation.
public override Similarity GetSimilarity(Searcher s)
{
    return sim;
}
/// <summary>
/// Demonstrates that a multi-term query which is NOT rewritten produces zero
/// highlights: the query is parsed ("JF? or Kenned*") and searched without
/// calling Rewrite, the highlighter is run over every hit, and the final
/// assertion expects numHighlights to remain 0.
/// </summary>
public virtual void TestUnRewrittenQuery() { //test to show how rewritten query can still be used searcher = new IndexSearcher(ramDir); Analyzer analyzer = new StandardAnalyzer(); QueryParser parser = new QueryParser(FIELD_NAME, analyzer); Query query = parser.Parse("JF? or Kenned*"); System.Console.Out.WriteLine("Searching with primitive query"); //forget to set this and... //query=query.rewrite(reader); Hits hits = searcher.Search(query); //create an instance of the highlighter with the tags used to surround highlighted text // QueryHighlightExtractor highlighter = new QueryHighlightExtractor(this, query, new StandardAnalyzer()); Highlighter highlighter = new Highlighter(this, new QueryScorer(query)); highlighter.SetTextFragmenter(new SimpleFragmenter(40)); int maxNumFragmentsRequired = 3; for (int i = 0; i < hits.Length(); i++) { System.String text = hits.Doc(i).Get(FIELD_NAME); TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text)); System.String highlightedText = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired, "..."); System.Console.Out.WriteLine(highlightedText); } //We expect to have zero highlights if the query is multi-terms and is not rewritten! Assert.IsTrue(numHighlights == 0, "Failed to find correct number of highlights " + numHighlights + " found"); }
/* (non-Javadoc) @see Lucene.Net.Search.Query#createWeight(Lucene.Net.Search.Searcher) */
protected internal override Weight CreateWeight(Searcher searcher)
{
    return new CustomWeight(this, searcher);
}
/// <summary>
/// Background refresh of the Lucene index for a single bug: under my_lock,
/// closes the shared searcher, deletes the bug's documents by "bg_id", and
/// re-indexes the description, tags, custom text columns, and posts that are
/// not hidden from external users. All exceptions are logged and swallowed.
/// The bug id is an int, so the SQL string substitution is not
/// user-controlled text.
/// </summary>
/// <param name="obj">boxed int bug id</param>
// update an existing index static void threadproc_update(object obj) { // just to be safe, make the worker threads wait for each other //System.Console.Beep(540, 20); lock (my_lock) // prevent contention between searching and writing? { //System.Console.Beep(840, 20); try { if (searcher != null) { try { searcher.Close(); } catch (Exception e) { btnet.Util.write_to_log("Exception closing lucene searcher:" + e.Message); } searcher = null; } Lucene.Net.Index.IndexModifier modifier = new Lucene.Net.Index.IndexModifier(index_path, anal, false); // same as buid, but uses "modifier" instead of write. // uses additional "where" clause for bugid int bug_id = (int)obj; btnet.Util.write_to_log("started updating Lucene index using folder " + MyLucene.index_path); modifier.DeleteDocuments(new Lucene.Net.Index.Term("bg_id", Convert.ToString(bug_id))); string sql = @" select bg_id, $custom_cols isnull(bg_tags,'') bg_tags, bg_short_desc from bugs where bg_id = $bugid"; sql = sql.Replace("$bugid", Convert.ToString(bug_id)); DataSet ds_text_custom_cols = get_text_custom_cols(); sql = sql.Replace("$custom_cols", get_text_custom_cols_names(ds_text_custom_cols)); // index the bugs DataRow dr = btnet.DbUtil.get_datarow(sql); modifier.AddDocument(MyLucene.create_doc( (int)dr["bg_id"], 0, "desc", (string)dr["bg_short_desc"])); // tags string tags = (string)dr["bg_tags"]; if (tags != "") { modifier.AddDocument(MyLucene.create_doc( (int)dr["bg_id"], 0, "tags", tags)); } // custom text fields foreach (DataRow dr_custom_col in ds_text_custom_cols.Tables[0].Rows) { string name = (string)dr_custom_col["name"]; string val = Convert.ToString(dr[name]); if (val != "") { modifier.AddDocument(MyLucene.create_doc( (int)dr["bg_id"], 0, name.Replace("'", "''"), val)); } } // index the bug posts DataSet ds = btnet.DbUtil.get_dataset(@" select bp_bug, bp_id, isnull(bp_comment_search,bp_comment) [text] from bug_posts where bp_type <> 'update' and bp_hidden_from_external_users = 0 and bp_bug = " + 
Convert.ToString(bug_id)); foreach (DataRow dr2 in ds.Tables[0].Rows) { modifier.AddDocument(MyLucene.create_doc( (int)dr2["bp_bug"], (int)dr2["bp_id"], "post", (string)dr2["text"])); } modifier.Flush(); modifier.Close(); btnet.Util.write_to_log("done updating Lucene index"); } catch (Exception e) { btnet.Util.write_to_log("exception updating Lucene index: " + e.Message); } } }
/// <summary> This demonstrates a typical paging search scenario, where the search engine presents /// pages of size n to the user. The user can then go to the next page if interested in /// the next hits. /// /// When the query is executed for the first time, then only enough results are collected /// to fill 5 result pages. If the user wants to page beyond this limit, then the query /// is executed another time and all hits are collected. /// /// </summary>
/// <param name="in_Renamed">console input stream used to read paging commands</param>
/// <param name="searcher">searcher the query is executed against</param>
/// <param name="query">the query to run</param>
/// <param name="hitsPerPage">page size n</param>
/// <param name="raw">when true, print raw doc ids and scores instead of stored fields</param>
/// <param name="interactive">when false, print only the first page and return</param>
public static void DoPagingSearch(System.IO.StreamReader in_Renamed, Searcher searcher, Query query, int hitsPerPage, bool raw, bool interactive) { // Collect enough docs to show 5 pages TopScoreDocCollector collector = TopScoreDocCollector.create(5 * hitsPerPage, false); searcher.Search(query, collector); ScoreDoc[] hits = collector.TopDocs().scoreDocs; int numTotalHits = collector.GetTotalHits(); System.Console.Out.WriteLine(numTotalHits + " total matching documents"); int start = 0; int end = System.Math.Min(numTotalHits, hitsPerPage); while (true) { if (end > hits.Length) { System.Console.Out.WriteLine("Only results 1 - " + hits.Length + " of " + numTotalHits + " total matching documents collected."); System.Console.Out.WriteLine("Collect more (y/n) ?"); System.String line = in_Renamed.ReadLine(); if (line.Length == 0 || line[0] == 'n') { break; } collector = TopScoreDocCollector.create(numTotalHits, false); searcher.Search(query, collector); hits = collector.TopDocs().scoreDocs; } end = System.Math.Min(hits.Length, start + hitsPerPage); for (int i = start; i < end; i++) { if (raw) { // output raw format System.Console.Out.WriteLine("doc=" + hits[i].doc + " score=" + hits[i].score); continue; } Document doc = searcher.Doc(hits[i].doc); System.String path = doc.Get("path"); if (path != null) { System.Console.Out.WriteLine((i + 1) + ". 
" + path); System.String title = doc.Get("title"); if (title != null) { System.Console.Out.WriteLine(" Title: " + doc.Get("title")); } } else { System.Console.Out.WriteLine((i + 1) + ". " + "No path for this document"); } } if (!interactive) { break; } if (numTotalHits >= end) { bool quit = false; while (true) { System.Console.Out.Write("Press "); if (start - hitsPerPage >= 0) { System.Console.Out.Write("(p)revious page, "); } if (start + hitsPerPage < numTotalHits) { System.Console.Out.Write("(n)ext page, "); } System.Console.Out.WriteLine("(q)uit or enter number to jump to a page."); System.String line = in_Renamed.ReadLine(); if (line.Length == 0 || line[0] == 'q') { quit = true; break; } if (line[0] == 'p') { start = System.Math.Max(0, start - hitsPerPage); break; } else if (line[0] == 'n') { if (start + hitsPerPage < numTotalHits) { start += hitsPerPage; } break; } else { int page = System.Int32.Parse(line); if ((page - 1) * hitsPerPage < numTotalHits) { start = (page - 1) * hitsPerPage; break; } else { System.Console.Out.WriteLine("No such page"); } } } if (quit) break; end = System.Math.Min(numTotalHits, start + hitsPerPage); } } }
// LUCENE-1404: counts documents whose "text" field contains the given word
// (capped at 10 collected docs; TotalHits still reports the full count).
private int HitCount(Searcher searcher, System.String word)
{
    Query wordQuery = new TermQuery(new Term("text", word));
    return searcher.Search(wordQuery, 10).TotalHits;
}
// LUCENE-1404: total number of documents matching the word in the "text"
// field; only the top 10 are actually collected.
private int HitCount(Searcher searcher, System.String word)
{
    Query wordQuery = new TermQuery(new Term("text", word));
    return searcher.Search(wordQuery, 10).totalHits;
}
/// <summary>
/// Builds a 500-document, single-segment index over a seek-counting Directory
/// and opens this.searcher over it. Every (numDocs / numHits)-th document
/// contains both term1 and term2, every 15th contains only term1, and the
/// rest contain term2 without term1. Assumes numHits is non-zero (it divides
/// numDocs).
/// </summary>
private void CreateIndex(int numHits) { int numDocs = 500; Directory directory = new SeekCountingDirectory(this); IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED); writer.UseCompoundFile = false; writer.SetMaxBufferedDocs(10); for (int i = 0; i < numDocs; i++) { Document doc = new Document(); System.String content; if (i % (numDocs / numHits) == 0) { // add a document that matches the query "term1 term2" content = this.term1 + " " + this.term2; } else if (i % 15 == 0) { // add a document that only contains term1 content = this.term1 + " " + this.term1; } else { // add a document that contains term2 but not term 1 content = this.term3 + " " + this.term2; } doc.Add(new Field(this.field, content, Field.Store.YES, Field.Index.ANALYZED)); writer.AddDocument(doc); } // make sure the index has only a single segment writer.Optimize(); writer.Close(); SegmentReader reader = SegmentReader.GetOnlySegmentReader(directory); this.searcher = new IndexSearcher(reader); }
// idf used for phrase queries: canned explanation returning a constant 1.0
// idf and a fixed description.
public override Explanation.IDFExplanation IdfExplain(ICollection<Term> terms, Searcher searcher)
{
    return new InjectableIDFExplanation
    {
        GetIdfFunc = () => 1.0f,
        ExplainFunc = () => "Inexplicable"
    };
}
/// <summary>
/// Builds a 500-document, single-segment RAM index: every
/// (numDocs / numHits)-th document contains both term1 and term2, every 15th
/// contains only term1, and the rest contain term2 without term1. The single
/// segment's proxStream is wrapped in a seek-counting stream before
/// this.searcher is opened over it. Assumes numHits is non-zero.
/// </summary>
private void CreateIndex(int numHits) { int numDocs = 500; Directory directory = new RAMDirectory(); IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true); writer.SetMaxBufferedDocs(10); for (int i = 0; i < numDocs; i++) { Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document(); System.String content; if (i % (numDocs / numHits) == 0) { // add a document that matches the query "term1 term2" content = this.term1 + " " + this.term2; } else if (i % 15 == 0) { // add a document that only contains term1 content = this.term1 + " " + this.term1; } else { // add a document that contains term2 but not term 1 content = this.term3 + " " + this.term2; } doc.Add(new Field(this.field, content, Field.Store.YES, Field.Index.TOKENIZED)); writer.AddDocument(doc); } // make sure the index has only a single segment writer.Optimize(); writer.Close(); // the index is a single segment, thus IndexReader.open() returns an instance of SegmentReader SegmentReader reader = (SegmentReader) IndexReader.Open(directory); // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek() reader.ProxStream_ForNUnitTest = new SeeksCountingStream(this, reader.ProxStream_ForNUnitTest); this.searcher = new IndexSearcher(reader); }
// Ignores the searcher and returns the fixed similarity instance.
public override Similarity GetSimilarity(Searcher s)
{
    return sim;
}
// NUnit-only accessor that builds this query's SpanWeight directly.
public Weight CreateWeight_ForNUnitTest(Searcher searcher)
{
    return new SpanWeight(this, searcher);
}
// Scoring goes through a CustomWeight built over this query.
public override Weight CreateWeight(Searcher searcher)
{
    return new CustomWeight(this, searcher);
}
// Builds the payload-aware near-span weight; the query serves as its own
// enclosing instance.
public override Weight CreateWeight(Searcher searcher)
{
    return new PayloadNearSpanWeight(this, this, searcher);
}
// Builds the stateful payload term weight for this query.
public override Weight CreateWeight(Searcher searcher, IState state)
{
    return new PayloadTermWeight(this, this, searcher, state);
}