Note that Hits obtained from a Searcher can only be accessed while that Searcher is still open; once the Searcher has been closed, accessing them throws an IOException.
/// <summary>Tests that a query matches exactly the expected set of document ids.</summary>
/// <param name="query">the query to test</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher to run the query against</param>
/// <param name="results">the document ids that must match the query</param>
/// <param name="testCase">unused; kept for signature compatibility</param>
public static void CheckHits_(Query query, System.String defaultFieldName, Searcher searcher, int[] results, TestCase testCase)
{
    Hits hits = searcher.Search(query);

    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add((System.Int32) results[i], null);
    }

    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    for (int i = 0; i < hits.Length(); i++)
    {
        actual.Add((System.Int32) hits.Id(i), null);
    }

    // The original membership loop only verified correct ⊆ actual, so spurious
    // extra hits went undetected; comparing the sizes as well makes the check a
    // true set-equality test, matching the intent of the disabled
    // Assert.AreEqual(correct, actual, ...) line.
    Assert.AreEqual(correct.Count, actual.Count, query.ToString(defaultFieldName));
    if (correct.Count != 0)
    {
        System.Collections.IDictionaryEnumerator iter = correct.GetEnumerator();
        bool status = false;
        while (iter.MoveNext())
        {
            status = actual.ContainsKey(iter.Key);
            if (status == false)
                break;
        }
        Assert.IsTrue(status, query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Runs <paramref name="query"/> against the index and returns up to
/// <paramref name="count"/> hint results, lazily opening the searcher on first use.
/// </summary>
public List<HintResult> Search(Predicate query, int count)
{
    // Lazily open a read-only searcher over the configured directory.
    if (_searcher == null)
    {
        _searcher = new IndexSearcher(this._directory, true);
    }
    TopDocs docsFound = _searcher.Search(query.GetQuery(), count);
    var list = new List<HintResult>();
    foreach (var score in docsFound.ScoreDocs)
    {
        Document doc = this._searcher.Doc(score.doc);
        var result = new HintResult();
        // NOTE(review): the population line below is commented out, so every
        // HintResult is returned with no field values. Presumably it should add
        // field.Name() / field.StringValue() pairs — confirm and restore.
        foreach (var field in doc.GetFields())
        {
            //result.FieldsValues.Add(field. .Name(), field.StringValue());
        }
        list.Add(result);
    }
    return list;
}
/// <summary>
/// Constructs a Hits collection for query <paramref name="q"/> restricted by
/// filter <paramref name="f"/>, executed against searcher <paramref name="s"/>.
/// </summary>
internal Hits(Searcher s, Query q, Filter f)
{
    weight = q.Weight(s);
    searcher = s;
    filter = f;
    // Presumably GetMoreDocs over-fetches (the original comment says 50 -> 100)
    // — confirm against GetMoreDocs' implementation.
    GetMoreDocs(50); // retrieve 100 initially
}
/// <summary>
/// Builds the Weight for a TermQuery: resolves the Similarity and caches the
/// term's idf together with its explanation.
/// </summary>
public TermWeight(TermQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
    // Keep the full explanation object so Explain() can report how idf was computed.
    idfExp = similarity.IdfExplain(Enclosing_Instance.term, searcher);
    idf = idfExp.Idf;
}
/// <summary>
/// Initializes a new instance of the <see cref="SearchResults" /> class,
/// materializing documents, facets and suggestions from the raw Lucene output.
/// </summary>
/// <param name="searcher">The searcher the query was executed on.</param>
/// <param name="reader">The reader used for facet and suggestion extraction.</param>
/// <param name="docs">The top documents returned by the search.</param>
/// <param name="criteria">The criteria the search was performed with.</param>
/// <param name="query">The query that produced <paramref name="docs"/>.</param>
public LuceneSearchResults(Searcher searcher, IndexReader reader, TopDocs docs, ISearchCriteria criteria, Query query)
{
    Results = new SearchResults(criteria, null);
    CreateDocuments(searcher, docs);
    CreateFacets(reader, query);
    CreateSuggestions(reader, criteria);
}
/// <summary>
/// Captures the query and searcher, then immediately executes the search via DoSearch.
/// </summary>
internal SearchResults(Query query, IEnumerable<SortField> sortField, Searcher searcher, int maxResults)
{
    LuceneQuery = query;
    LuceneSearcher = searcher;
    DoSearch(query, sortField, maxResults);
}
public /*internal*/ bool debugCheckedForDeletions = false; // for test purposes.

/// <summary>
/// Constructs a Hits collection, recording the deletion count and the initial
/// length so later accesses can detect an index that changed underneath.
/// </summary>
internal Hits(Searcher s, Query q, Filter f)
{
    weight = q.Weight(s);
    searcher = s;
    filter = f;
    // -1 when the deletion count cannot be determined (see CountDeletions).
    nDeletions = CountDeletions(s);
    GetMoreDocs(50); // retrieve 100 initially
    lengthAtStart = length;
}
// Counts the number of deleted documents, or returns -1 if it cannot be
// determined (only IndexSearcher exposes the underlying reader).
private int CountDeletions(Searcher s)
{
    var indexSearcher = s as IndexSearcher;
    if (indexSearcher == null)
    {
        return -1;
    }
    // MaxDoc includes deleted slots; NumDocs excludes them.
    return s.MaxDoc() - indexSearcher.GetIndexReader().NumDocs();
}
/// <summary>
/// Builds the Weight for a SpanQuery: extracts the query's terms and computes
/// a combined idf (with explanation) over all of them.
/// </summary>
public SpanWeight(SpanQuery query, Searcher searcher)
{
    this.similarity = query.GetSimilarity(searcher);
    this.query = query;
    terms = new System.Collections.Hashtable();
    query.ExtractTerms(terms);
    // idfExplain takes a list, so copy the extracted term values out of the table.
    idfExp = similarity.idfExplain(new System.Collections.ArrayList(terms.Values), searcher);
    idf = idfExp.GetIdf();
}
/// <summary>
/// Builds the Weight for a SpanQuery: collects the query's terms into a set
/// and computes a combined idf (with explanation) over all of them.
/// </summary>
public SpanWeight(SpanQuery query, Searcher searcher)
{
    this.similarity = query.GetSimilarity(searcher);
    this.query = query;
    terms = new Support.Set<Lucene.Net.Index.Term>();
    query.ExtractTerms(terms);
    idfExp = similarity.idfExplain(terms.ToArray(), searcher);
    idf = idfExp.GetIdf();
}
/// <summary>
/// Builds the Weight for a SpanQuery: collects the query's terms into a hash
/// set and computes a combined idf (with explanation) over all of them.
/// </summary>
public SpanWeight(SpanQuery query, Searcher searcher)
{
    this.similarity = query.GetSimilarity(searcher);
    this.internalQuery = query;
    terms = Lucene.Net.Support.Compatibility.SetFactory.CreateHashSet<Term>();
    query.ExtractTerms(terms);
    idfExp = similarity.IdfExplain(terms, searcher);
    idf = idfExp.Idf;
}
/// <summary>
/// Highlights the portions of <paramref name="value"/> that match the raw
/// Lucene query, returning the best fragments joined by the configured separator.
/// </summary>
public string GetHighlight(string value, string highlightField, Searcher searcher, string luceneRawQuery)
{
    var parsedQuery = GetQueryParser(highlightField).Parse(luceneRawQuery);
    // Rewrite the query against the searcher before scoring fragments.
    var fragmentScorer = new QueryScorer(searcher.Rewrite(parsedQuery));
    var fragmentHighlighter = new Highlighter(HighlightFormatter, fragmentScorer);
    var stream = HighlightAnalyzer.TokenStream(highlightField, new StringReader(value));
    return fragmentHighlighter.GetBestFragments(stream, value, MaxNumHighlights, Separator);
}
/// <summary>
/// Builds the Weight for a SpanQuery: extracts the query's terms and sums
/// their idf factors.
/// </summary>
public SpanWeight(SpanQuery query, Searcher searcher)
{
    this.similarity = query.GetSimilarity(searcher);
    this.query = query;
    terms = new System.Collections.Hashtable();
    query.ExtractTerms(terms);
    System.Collections.ArrayList tmp = new System.Collections.ArrayList(terms.Values);
    // Reuse the Similarity resolved above instead of resolving it a second time
    // via this.query.GetSimilarity(searcher).
    idf = similarity.Idf(tmp, searcher);
}
/// <summary>
/// Maps each hit in <paramref name="results"/> to an Airport and appends it
/// to <paramref name="itemsList"/>.
/// </summary>
private static void GetResults(ref List<Airport> itemsList, TopDocs results, Searcher searcher)
{
    foreach (ScoreDoc scoreDoc in results.ScoreDocs)
    {
        Document doc = searcher.Doc(scoreDoc.Doc);
        // Build the display string once; label and value are intentionally identical.
        string display = doc.Get("CityName") + " - " + doc.Get("Name") + " (" + doc.Get("Code") + ")";
        var item = new Airport();
        item.id = doc.Get("Code");
        item.label = display;
        item.value = display;
        itemsList.Add(item);
    }
}
/// <summary>
/// Creates the result document collection from Lucene documents, copying the
/// requested page window (StartingRecord / RecordsToRetrieve) into ResultDocuments.
/// </summary>
/// <param name="searcher">The searcher used to load stored documents by doc id.</param>
/// <param name="topDocs">The hits; when null the method is a no-op.</param>
private void CreateDocuments(Searcher searcher, TopDocs topDocs)
{
    // if no documents found return
    if (topDocs == null)
        return;

    var entries = new List<ResultDocument>();

    // get total hits
    var totalCount = topDocs.TotalHits;
    var recordsToRetrieve = Results.SearchCriteria.RecordsToRetrieve;
    var startIndex = Results.SearchCriteria.StartingRecord;
    // Never ask for more records than actually matched.
    if (recordsToRetrieve > totalCount)
        recordsToRetrieve = totalCount;

    for (var index = startIndex; index < startIndex + recordsToRetrieve; index++)
    {
        // The page window may extend past the hit list when startIndex > 0.
        if (index >= totalCount)
            break;

        var document = searcher.Doc(topDocs.ScoreDocs[index].Doc);
        var doc = new ResultDocument();
        var documentFields = document.GetFields();
        using (var fi = documentFields.GetEnumerator())
        {
            while (fi.MoveNext())
            {
                if (fi.Current != null)
                {
                    var field = fi.Current;
                    // Copy every stored field into the result document.
                    doc.Add(new DocumentField(field.Name, field.StringValue));
                }
            }
        }

        entries.Add(doc);
    }

    var searchDocuments = new ResultDocumentSet
    {
        Name = "Items",
        Documents = entries.OfType<IDocument>().ToArray(),
        TotalCount = totalCount
    };

    Results.Documents = new[] { searchDocuments };
}
/// <summary>
/// Perform synonym expansion on a query.
/// </summary>
/// <param name="query">the user's query, assumed to contain no "special" query syntax — just plain
/// words, so "big dog" makes sense but "title:foo^1.2" does not (such queries should be passed
/// directly to the default query parser instead)</param>
/// <param name="syns">a Searcher opened on the Lucene synonym index you previously created with
/// <see cref="Syns2Index"/>. The searcher is not closed or otherwise altered.</param>
/// <param name="a">optional analyzer used to parse the user's query, else <see cref="StandardAnalyzer"/> is used</param>
/// <param name="field">optional field name to search in, or null for the default of "contents"</param>
/// <param name="boost">optional boost applied to synonyms, else no boost is applied</param>
/// <returns>the expanded Query</returns>
public static Query Expand(String query, Searcher syns, Analyzer a, String field, float boost)
{
    already = new List<String>(); // avoid dups
    var top = new List<String>(); // needs to be separately listed..
    if (field == null)
        field = "contents";
    if (a == null)
        a = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT);

    // [1] Parse query into separate words so that when we expand we can avoid dups
    var ts = a.TokenStream(field, new StringReader(query));
    var termAtt = ts.AddAttribute<TermAttribute>();
    while (ts.IncrementToken())
    {
        var word = termAtt.Term;
        if (!already.Contains(word))
        {
            already.Add(word);
            top.Add(word);
        }
    }

    tmp = new BooleanQuery();

    // [2] form query
    System.Collections.IEnumerator it = top.GetEnumerator();
    while (it.MoveNext())
    {
        // [2a] add to level words in
        var word = (String) it.Current;
        var tq = new TermQuery(new Term(field, word));
        tmp.Add(tq, Occur.SHOULD);

        // [2b] search the synonym index for this word; CollectorImpl presumably
        // appends the boosted synonym terms to `tmp` — confirm against CollectorImpl.
        var c = new CollectorImpl(field, boost);
        syns.Search(new TermQuery(new Term(Syns2Index.F_WORD, word)), c);
    }

    return tmp;
}
/// <summary>
/// Loads the on-disk index into RAM and prepares a read-only reader/searcher
/// plus a query parser over the "name" field.
/// </summary>
public SearchService()
{
    // Copy the FSDirectory into a RAMDirectory so searches run fully in memory,
    // then release the on-disk handle.
    var tmpluceneIndexDir = LuceneStore.FSDirectory.Open(INDEX_DIR);
    luceneIndexDir = new LuceneStore.RAMDirectory(tmpluceneIndexDir);
    tmpluceneIndexDir.Close();
    analyzer = new StandardAnalyzer(LuceneUtil.Version.LUCENE_29);
    reader = IndexReader.Open(luceneIndexDir, true); // only searching, so read-only=true
    searcher = new IndexSearcher(reader);
    //            parser = new QueryParser(LuceneUtil.Version.LUCENE_29, "analized_path", analyzer);
    parser = new QueryParser(LuceneUtil.Version.LUCENE_29, "name", analyzer);
    // Require all terms by default and allow queries like "*foo".
    parser.SetDefaultOperator(QueryParser.Operator.AND);
    parser.SetAllowLeadingWildcard(true);
}
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f; // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment

/// <summary> Tests that all documents up to maxDoc which are *not* in the
/// expected result set, have an explanation which indicates no match
/// (ie: Explanation value of 0.0f)
/// </summary>
public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
{
    System.String d = q.ToString(defaultFieldName);

    // Collect the ids that ARE expected to match; everything else must explain to 0.
    System.Collections.Hashtable ignore = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(ignore, (System.Int32) results[i]);
    }

    int maxDoc = searcher.MaxDoc();
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (ignore.Contains((System.Int32) doc))
            continue;

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
        // Exact 0.0f expected: non-matching docs must not contribute any score.
        Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
/// <summary>
/// Searches the reviewables index for <paramref name="searchText"/> (as a
/// prefix query) and returns a map of reviewable id -> score of its best hit.
/// </summary>
public static Hashtable searchReviewables(string searchText)
{
    query = parser.Parse(searchText + "*");
    Hashtable results = new Hashtable();
    lock (my_lock)
    {
        Lucene.Net.Search.TopDocs hits = null;
        try
        {
            if (searcher == null)
            {
                Lucene.Net.Store.FSDirectory d = Lucene.Net.Store.FSDirectory.Open(_indexPath);
                searcher = new Lucene.Net.Search.IndexSearcher(d);
            }
            hits = searcher.Search(query, 200);
        }
        catch (Exception)
        {
            // Index unavailable or search failed: return the empty map instead of
            // falling through and dereferencing a null TopDocs (the original empty
            // catch block caused a NullReferenceException below).
            return results;
        }
        // The original loop ran to ScoreDocs.Count() - 1 and silently dropped the
        // last hit; iterate the full array.
        for (int i = 0; i < hits.ScoreDocs.Length; i++)
        {
            Document doc = searcher.Doc(hits.ScoreDocs[i].Doc);
            string id = doc.GetField("reviewableid").StringValue;
            // Keep only the first (highest-ranked) score per reviewable id.
            if (!results.Contains(id))
                results.Add(id, hits.ScoreDocs[i].Score);
        }
    }
    return results;
}
/// <summary> Returns a Weight that applies the filter to the enclosed query's Weight.
/// This is accomplished by overriding the Scorer returned by the Weight.
/// </summary>
protected internal override Weight CreateWeight(Searcher searcher)
{
    // Wrap the inner query's weight so its scorer can be filter-restricted.
    Weight innerWeight = query.CreateWeight(searcher);
    Similarity innerSimilarity = query.GetSimilarity(searcher);
    return new AnonymousClassWeight(innerWeight, innerSimilarity, this);
}
/// <summary>Deep-checks that the explanations of a query 'score' correctly.</summary>
public static void CheckExplanations(Query q, Searcher s)
{
    // Delegate to the full overload: no default field name, deep checking on.
    CheckHits.CheckExplanations(q, null, s, true);
}
/// <summary>Expert: Returns the Similarity implementation to be used for this query.
/// Subclasses may override this method to specify their own Similarity
/// implementation, perhaps one that delegates through that of the Searcher.
/// By default the Searcher's Similarity implementation is returned.
/// </summary>
public virtual Similarity GetSimilarity(Searcher searcher)
{
    return searcher.GetSimilarity();
}
/// <summary>Expert: Constructs an appropriate Weight implementation for this query.
///
/// <p>Only implemented by primitive queries, which re-write to themselves.
/// </summary>
protected internal virtual Weight CreateWeight(Searcher searcher)
{
    // Non-primitive queries must be rewritten before a Weight can be built.
    throw new System.NotSupportedException();
}
/// <summary>Builds the BooleanQuery's Weight for the given searcher and state.</summary>
public override Weight CreateWeight(Searcher searcher, IState state)
{
    return new BooleanWeight(this, searcher, state);
}
/// <summary>Returns a constant, pre-built idf explanation regardless of the terms.</summary>
public override Explanation.IDFExplanation IdfExplain(System.Collections.Generic.ICollection<Term> terms, Searcher searcher, IState state)
{
    return new AnonymousIDFExplanation();
}
/// <summary>
/// Searches for documents mapped from the given type using the specified
/// query and Collector.
/// </summary>
/// <param name="searcher">The Searcher to search on.</param>
/// <param name="type">The type of the object to search documents for.</param>
/// <param name="query">The Query which selects the documents.</param>
/// <param name="results">The Collector to use to gather results.</param>
public static void Search(this Searcher searcher, string type, Query query, Collector results)
{
    // Restrict matches to documents of the requested mapped type.
    var typeFilter = JsonMappingUtils.GetTypeFilter(type);
    searcher.Search(query, typeFilter, results);
}
/// <summary>Builds the constant-score Weight for this query.</summary>
protected internal override Weight CreateWeight(Searcher searcher)
{
    return new ConstantScoreQuery.ConstantWeight(this, searcher);
}
/// <summary>Builds the MultiPhraseQuery's Weight for the given searcher.</summary>
protected internal override Weight CreateWeight(Searcher searcher)
{
    return new MultiPhraseWeight(this, searcher);
}
/// <summary> Asserts that the explanation value for every document matching a
/// query corresponds with the true score. Delegates to the four-argument
/// overload with deep comparison disabled.
/// </summary>
/// <seealso cref="ExplanationAsserter"/>
/// <seealso cref="CheckExplanations(Query, String, Searcher, boolean)">for
/// "deep" testing of the explanation details.</seealso>
/// <param name="query">the query to test</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
public static void CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher)
{
    CheckExplanations(query, defaultFieldName, searcher, false);
}
/// <summary> Tests that a query matches an expected set of documents using a
/// HitCollector.
///
/// <p/>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// <p/>
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="Searcher.Search(Query,HitCollector)"/>
/// <seealso cref="checkHits"/>
public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // General query sanity checks first.
    QueryUtils.Check(query, searcher);

    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(correct, (System.Int32)results[i]);
    }

    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    Collector c = new SetCollector(actual);

    searcher.Search(query, c);
    Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

    // Re-run with the searcher wrapped in various ways (see QueryUtils.WrapSearcher);
    // the collected set must be identical each time.
    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapSearcher(searcher, i).Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Searcher " + i + ": " + query.ToString(defaultFieldName));
    }

    // The reader-wrapping checks only apply to a plain IndexSearcher.
    if (!(searcher is IndexSearcher))
    {
        return;
    }

    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapUnderlyingReader((IndexSearcher)searcher, i).Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary>Constructs an instance which does shallow tests on the Explanation.</summary>
public ExplanationAsserter(Query q, System.String defaultFieldName, Searcher s)
    : this(q, defaultFieldName, s, false)
{
}
/// <summary>Builds the TermQuery's Weight for the given searcher.</summary>
public override Weight CreateWeight(Searcher searcher)
{
    return new TermWeight(this, searcher);
}
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f; // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment

/// <summary> Tests that all documents up to maxDoc which are *not* in the
/// expected result set, have an explanation which indicates no match
/// (ie: Explanation value of 0.0f)
/// </summary>
public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
{
    System.String d = q.ToString(defaultFieldName);

    // Collect the ids that ARE expected to match; everything else must explain to 0.
    System.Collections.Hashtable ignore = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(ignore, (System.Int32)results[i]);
    }

    int maxDoc = searcher.MaxDoc();
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (ignore.Contains((System.Int32)doc))
        {
            continue;
        }

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
        // Exact 0.0f expected: non-matching docs must not contribute any score.
        Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
/// <summary>Caches the Similarity to be used by the constant-score scorer.</summary>
public ConstantWeight(ConstantScoreQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
}
/// <summary>Computes a score factor for a simple term.
///
/// <p>The default implementation is:<pre>
/// return idf(searcher.docFreq(term), searcher.maxDoc());
/// </pre>
///
/// Note that {@link Searcher#MaxDoc()} is used instead of
/// {@link IndexReader#NumDocs()} because it is proportional to
/// {@link Searcher#DocFreq(Term)} , i.e., when one is inaccurate,
/// so is the other, and in the same direction.
///
/// </summary>
/// <param name="term">the term in question
/// </param>
/// <param name="searcher">the document collection being searched
/// </param>
/// <returns> a score factor for the term
/// </returns>
public virtual float Idf(Term term, Searcher searcher)
{
    // Was `Ldf(...)`: no such member appears anywhere else, and both the doc
    // comment above and the parallel overload of this method call Idf.
    return Idf(searcher.DocFreq(term), searcher.MaxDoc());
}
/// <summary>Builds the MultiPhraseQuery's Weight for the given searcher.</summary>
public override Weight CreateWeight(Searcher searcher)
{
    return new MultiPhraseWeight(this, searcher);
}
/// <summary> Tests that a query matches an expected set of documents using Hits.
///
/// <p/>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// <p/>
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="Searcher.Search(Query)"/>
/// <seealso cref="checkHitCollector"/>
public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    if (searcher is IndexSearcher)
    {
        QueryUtils.Check(query, searcher);
    }

    ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;

    System.Collections.ArrayList correct = new System.Collections.ArrayList();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(correct, results[i]);
    }
    correct.Sort();

    System.Collections.ArrayList actual = new System.Collections.ArrayList();
    for (int i = 0; i < hits.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(actual, hits[i].doc);
    }
    actual.Sort();

    // Both id lists were sorted above, so this is an order-insensitive comparison.
    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    QueryUtils.Check(query, searcher);
}
/// <summary>Builds the MatchAllDocsQuery's Weight for the given searcher.</summary>
public override Weight CreateWeight(Searcher searcher)
{
    return new MatchAllDocsWeight(this, searcher);
}
/// <summary>Builds the BooleanQuery's Weight for the given searcher.</summary>
public override Weight CreateWeight(Searcher searcher)
{
    return new BooleanWeight(this, searcher);
}
/// <summary> Computes a score factor for a simple term and returns an explanation
/// for that score factor.
///
/// <p/>
/// The default implementation uses:
///
/// <pre>
/// idf(searcher.docFreq(term), searcher.maxDoc());
/// </pre>
///
/// Note that {@link Searcher#MaxDoc()} is used instead of
/// {@link Lucene.Net.Index.IndexReader#NumDocs()} because it is
/// proportional to {@link Searcher#DocFreq(Term)} , i.e., when one is
/// inaccurate, so is the other, and in the same direction.
///
/// </summary>
/// <param name="term">the term in question
/// </param>
/// <param name="searcher">the document collection being searched
/// </param>
/// <returns> an IDFExplain object that includes both an idf score factor
/// and an explanation for the term.
/// </returns>
/// <throws> IOException </throws>
public virtual IDFExplanation IdfExplain(Term term, Searcher searcher)
{
    // If a subclass overrode Idf(Term, Searcher), honor that override and wrap
    // its value without a df/maxDoc breakdown.
    if (SupportedMethods.overridesTermIDF)
    {
        float idf = Idf(term, searcher);
        return new AnonymousClassIDFExplanation(idf, this);
    }
    // Default path: compute from document frequency and collection size, keeping
    // both numbers so the explanation can display them.
    int df = searcher.DocFreq(term);
    int max = searcher.MaxDoc();
    float idf2 = Idf(df, max);
    return new AnonymousClassIDFExplanation1(df, max, idf2, this);
}
/// <summary>Builds the TermQuery's Weight for the given searcher.</summary>
public override Weight CreateWeight(Searcher searcher)
{
    return new TermWeight(this, searcher);
}
/// <summary> Computes a score factor for a phrase.
///
/// <p/>
/// The default implementation sums the idf factor for
/// each term in the phrase.
///
/// </summary>
/// <param name="terms">the terms in the phrase
/// </param>
/// <param name="searcher">the document collection being searched
/// </param>
/// <returns> an IDFExplain object that includes both an idf
/// score factor for the phrase and an explanation
/// for each term.
/// </returns>
/// <throws> IOException </throws>
public virtual IDFExplanation idfExplain(System.Collections.ICollection terms, Searcher searcher)
{
    // If a subclass overrode Idf(ICollection, Searcher), honor that override and
    // wrap its value without a per-term breakdown.
    if (SupportedMethods.overridesCollectionIDF)
    {
        float idf = Idf(terms, searcher);
        return new AnonymousClassIDFExplanation2(idf, this);
    }
    int max = searcher.MaxDoc();
    float idf2 = 0.0f;
    // Accumulate the summed idf and build a " term=df" trail for the explanation.
    System.Text.StringBuilder exp = new System.Text.StringBuilder();
    foreach (Term term in terms)
    {
        int df = searcher.DocFreq(term);
        idf2 += Idf(df, max);
        exp.Append(" ");
        exp.Append(term.Text());
        exp.Append("=");
        exp.Append(df);
    }
    float fIdf = idf2;
    return new AnonymousClassIDFExplanation3(fIdf, exp, this);
}
/// <summary> Expert: Constructs an appropriate Weight implementation for this query.
///
/// <p/>
/// Only implemented by primitive queries, which re-write to themselves.
/// </summary>
public virtual Weight CreateWeight(Searcher searcher)
{
    // Composite queries must be rewritten to primitive ones before weighting.
    throw new System.NotSupportedException();
}
// Implement coord disabling.
// Inherit javadoc.
public override Similarity GetSimilarity(Searcher searcher)
{
    Similarity similarity = base.GetSimilarity(searcher);
    if (!disableCoord)
    {
        return similarity;
    }
    // Coord disabling requested: wrap the base Similarity in a delegator.
    return new AnonymousClassSimilarityDelegator(this, similarity);
}
/// <summary> Various query sanity checks on a searcher; some checks are only done
/// for IndexSearcher instances.
/// </summary>
/// <seealso cref="Check(Query)"/>
/// <seealso cref="checkFirstSkipTo"/>
/// <seealso cref="checkSkipTo"/>
/// <seealso cref="checkExplanations"/>
/// <seealso cref="checkSerialization"/>
/// <seealso cref="checkEqual"/>
public static void Check(Query q1, Searcher s)
{
    // Run the full battery with the wrap checks enabled.
    Check(q1, s, true);
}
/// <summary>
/// Builds the BooleanQuery's Weight, creating a child Weight for every clause.
/// </summary>
public BooleanWeight(BooleanQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
    foreach (BooleanClause clause in Enclosing_Instance.clauses)
    {
        weights.Add(clause.GetQuery().CreateWeight(searcher));
    }
}
/// <summary>
/// Builds two separate in-memory indexes searched via a MultiSearcher, plus a
/// single combined index searched via an IndexSearcher, so tests can compare
/// results between the two.
/// </summary>
public override void SetUp()
{
    base.SetUp();

    // create MultiSearcher from two seperate searchers
    Directory d1 = new RAMDirectory();
    IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    AddCollection1(iw1);
    iw1.Close();

    Directory d2 = new RAMDirectory();
    IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    AddCollection2(iw2);
    iw2.Close();

    Searchable[] s = new Searchable[2];
    s[0] = new IndexSearcher(d1);
    s[1] = new IndexSearcher(d2);
    multiSearcher = new MultiSearcher(s);

    // create IndexSearcher which contains all documents
    Directory d = new RAMDirectory();
    IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    AddCollection1(iw);
    AddCollection2(iw);
    iw.Close();
    singleSearcher = new IndexSearcher(d);
}
/* Merge into BooleanWeight in case the 1.4 BooleanScorer is dropped */
/// <summary>Delegates all weight construction to the BooleanWeight base class.</summary>
public BooleanWeight2(BooleanQuery enclosingInstance, Searcher searcher)
    : base(enclosingInstance, searcher)
{
    InitBlock(enclosingInstance);
}
/// <summary>
/// Searches documents of the given mapped type, returning at most
/// <paramref name="numResults"/> hits.
/// </summary>
public static TopDocs Search(this Searcher searcher, string type, Query query, int numResults)
{
    var typeFilter = JsonMappingUtils.GetTypeFilter(type);
    return searcher.Search(query, typeFilter, numResults);
}
/// <summary> Asserts that the explanation value for every document matching a
/// query corresponds with the true score. Optionally does "deep"
/// testing of the explanation details.
/// </summary>
/// <seealso cref="ExplanationAsserter"/>
/// <param name="query">the query to test</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="deep">indicates whether a deep comparison of sub-Explanation details should be executed</param>
public static void CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher, bool deep)
{
    // The asserter validates each collected hit's explanation as the search runs.
    var asserter = new ExplanationAsserter(query, defaultFieldName, searcher, deep);
    searcher.Search(query, asserter);
}
/// <summary>Builds the BooleanQuery's Weight for the given searcher.</summary>
protected internal override Weight CreateWeight(Searcher searcher)
{
    return new BooleanWeight(this, searcher);
}
/// <summary>
/// Searches documents of the given mapped type, combining the type filter
/// with the <paramref name="Filter"/> query.
/// </summary>
public static TopDocs Search(this Searcher searcher, string type, Query Filter, Query query, int numResults)
{
    // NOTE(review): the original also built `new QueryWrapperFilter(query)` into
    // a local that was never used; that dead allocation has been removed. Confirm
    // the intent was not to pass that wrapper instead of the raw `Filter` query.
    return searcher.Search(query, JsonMappingUtils.GetTypeFilter(type, Filter), numResults);
}
/// <summary>Caches the searcher's default Similarity for the match-all scorer.</summary>
public MatchAllDocsWeight(MatchAllDocsQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = searcher.GetSimilarity();
}
/// <summary>
/// Searches documents of the given mapped type, returning at most
/// <paramref name="numResults"/> hits ordered by <paramref name="sort"/>.
/// </summary>
public static TopDocs Search(this Searcher searcher, string type, Query query, int numResults, Sort sort)
{
    var typeFilter = JsonMappingUtils.GetTypeFilter(type);
    return searcher.Search(query, typeFilter, numResults, sort);
}
/// <summary>Computes the idf score factor for a single term from its document
/// frequency and the collection size.</summary>
public virtual float Idf(Term term, Searcher searcher)
{
    int docFreq = searcher.DocFreq(term);
    int maxDoc = searcher.MaxDoc();
    return Idf(docFreq, maxDoc);
}
/// <summary>Builds the TermQuery's Weight for the given searcher.</summary>
protected internal override Weight CreateWeight(Searcher searcher)
{
    return new TermWeight(this, searcher);
}
/// <summary>Sums the idf factor of every term in the collection.</summary>
public virtual float Idf(System.Collections.ICollection terms, Searcher searcher)
{
    float total = 0.0f;
    foreach (Term term in terms)
    {
        total += Idf(term, searcher);
    }
    return total;
}
/// <summary>
/// Builds the Weight for a TermQuery: resolves the Similarity and caches the
/// idf factor for the query's term.
/// </summary>
public TermWeight(TermQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
    idf = similarity.Idf(Enclosing_Instance.term, searcher); // compute idf
}
/// <summary>
/// Writes the given hits to stderr for debugging: a header line, then one line
/// per hit with its score and the stored "id" and "data" fields.
/// </summary>
protected internal virtual void PrintHits(System.String test, ScoreDoc[] h, Searcher searcher)
{
    System.Console.Error.WriteLine("------- " + test + " -------");
    for (int i = 0; i < h.Length; i++)
    {
        Document d = searcher.Doc(h[i].Doc);
        float score = h[i].Score;
        // The original concatenated the literal text "{0.000000}" before the
        // score instead of formatting it; format the score to six decimals.
        System.Console.Error.WriteLine("#" + i + ": " + score.ToString("0.000000") + " - " + d.Get("id") + " - " + d.Get("data"));
    }
}
/// <summary>Returns a boost-scaled default Similarity, ignoring the searcher's own.</summary>
public override Similarity GetSimilarity(Searcher searcher)
{
    return new AnonymousDefaultSimilarity(boost);
}