public override System.Collections.BitArray Bits(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    if (cache == null)
    {
        cache = new System.Collections.Hashtable();
    }

    lock (cache.SyncRoot)
    {
        // check cache
        System.Collections.BitArray cached = (System.Collections.BitArray) cache[reader];
        if (cached != null)
        {
            return cached;
        }
    }

    System.Collections.BitArray bits = filter.Bits(reader);

    lock (cache.SyncRoot)
    {
        // update cache
        cache[reader] = bits;
    }

    return bits;
}
private void InitBlock(System.Collections.BitArray bits, int[] totalHits, Monodoc.Lucene.Net.Search.FieldSortedHitQueue hq, IndexSearcher enclosingInstance)
{
    this.bits = bits;
    this.totalHits = totalHits;
    this.hq = hq;
    this.enclosingInstance = enclosingInstance;
}
public override System.Collections.BitArray Bits(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    if (cache == null)
    {
        cache = new System.Collections.Hashtable();
    }

    lock (cache.SyncRoot)
    {
        // check cache
        System.Collections.BitArray cached = (System.Collections.BitArray) cache[reader];
        if (cached != null)
        {
            return cached;
        }
    }

    // size the BitArray rounded up to the next multiple of 64
    System.Collections.BitArray bits = new System.Collections.BitArray((reader.MaxDoc() % 64 == 0 ? reader.MaxDoc() / 64 : reader.MaxDoc() / 64 + 1) * 64);
    new IndexSearcher(reader).Search(query, new AnonymousClassHitCollector(bits, this));

    lock (cache.SyncRoot)
    {
        // update cache
        cache[reader] = bits;
    }

    return bits;
}
private void InitBlock(Monodoc.Lucene.Net.Index.IndexReader reader, SpanNotQuery enclosingInstance)
{
    this.reader = reader;
    this.enclosingInstance = enclosingInstance;
    includeSpans = Enclosing_Instance.include.GetSpans(reader);
    excludeSpans = Enclosing_Instance.exclude.GetSpans(reader);
}
public override Query Rewrite(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    BooleanQuery query = new BooleanQuery();
    TermEnum enumerator = reader.Terms(prefix);
    try
    {
        System.String prefixText = prefix.Text();
        System.String prefixField = prefix.Field();
        do
        {
            Term term = enumerator.Term();
            // field names are interned, so reference comparison suffices
            if (term != null && term.Text().StartsWith(prefixText) && (System.Object) term.Field() == (System.Object) prefixField)
            {
                TermQuery tq = new TermQuery(term); // found a match
                tq.SetBoost(GetBoost()); // set the boost
                query.Add(tq, false, false); // add to query
            }
            else
            {
                break;
            }
        }
        while (enumerator.Next());
    }
    finally
    {
        enumerator.Close();
    }
    return query;
}
/// <summary>Creates a searcher which searches <i>searchables</i>. </summary>
public MultiSearcher(Monodoc.Lucene.Net.Search.Searchable[] searchables)
{
    this.searchables = searchables;

    starts = new int[searchables.Length + 1]; // build starts array
    for (int i = 0; i < searchables.Length; i++)
    {
        starts[i] = maxDoc;
        maxDoc += searchables[i].MaxDoc(); // compute maxDocs
    }
    starts[searchables.Length] = maxDoc;
}
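// Usage sketch (not from this library's source; the index paths and the
// IndexSearcher(string) constructor are assumptions based on the Lucene 1.4
// API this port tracks): search two independent indexes through one
// MultiSearcher. Hits from the second index are renumbered past
// searchables[0].MaxDoc() via the starts array built above.
public static void MultiSearcherSketch()
{
    Monodoc.Lucene.Net.Search.Searchable[] searchables = new Monodoc.Lucene.Net.Search.Searchable[] {
        new IndexSearcher("/tmp/index-a"), // hypothetical index path
        new IndexSearcher("/tmp/index-b")  // hypothetical index path
    };
    MultiSearcher searcher = new MultiSearcher(searchables);
    Hits hits = searcher.Search(new TermQuery(new Term("body", "lucene")));
    System.Console.WriteLine(hits.Length() + " hits across both indexes");
}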
private void InitBlock(Monodoc.Lucene.Net.Index.IndexReader reader, SpanOrQuery enclosingInstance)
{
    this.reader = reader;
    this.enclosingInstance = enclosingInstance;
    all = new System.Collections.ArrayList(Enclosing_Instance.clauses.Count);
    queue = new SpanQueue(enclosingInstance, Enclosing_Instance.clauses.Count);
    System.Collections.IEnumerator i = Enclosing_Instance.clauses.GetEnumerator();
    while (i.MoveNext())
    {
        // initialize all
        all.Add(((SpanQuery) i.Current).GetSpans(reader));
    }
}
private void Initialize(Monodoc.Lucene.Net.Index.IndexReader[] subReaders)
{
    this.subReaders = subReaders;
    starts = new int[subReaders.Length + 1]; // build starts array
    for (int i = 0; i < subReaders.Length; i++)
    {
        starts[i] = maxDoc;
        maxDoc += subReaders[i].MaxDoc(); // compute maxDocs

        if (subReaders[i].HasDeletions())
            hasDeletions = true;
    }
    starts[subReaders.Length] = maxDoc;
}
/// <summary> Rewrites the range query into a <code>BooleanQuery</code> containing one
/// optional <code>TermQuery</code> for every indexed term that falls within the range.
/// </summary>
/// <param name="reader">an <code>Monodoc.Lucene.Net.Index.IndexReader</code> value
/// </param>
/// <returns> a <code>Query</code> value
/// </returns>
/// <exception cref="System.IO.IOException"> if an error occurs
/// </exception>
public override Query Rewrite(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    BooleanQuery query = new BooleanQuery();
    TermEnum enumerator = reader.Terms(lowerTerm);
    try
    {
        bool checkLower = false;
        if (!inclusive) // make adjustments to set to exclusive
            checkLower = true;

        System.String testField = GetField();

        do
        {
            Term term = enumerator.Term();
            if (term != null && (System.Object) term.Field() == (System.Object) testField)
            {
                if (!checkLower || String.CompareOrdinal(term.Text(), lowerTerm.Text()) > 0)
                {
                    checkLower = false;
                    if (upperTerm != null)
                    {
                        int compare = String.CompareOrdinal(upperTerm.Text(), term.Text());
                        /* if beyond the upper term, or is exclusive and
                         * this is equal to the upper term, break out */
                        if ((compare < 0) || (!inclusive && compare == 0))
                            break;
                    }
                    TermQuery tq = new TermQuery(term); // found a match
                    tq.SetBoost(GetBoost()); // set the boost
                    query.Add(tq, false, false); // add to query
                }
            }
            else
            {
                break;
            }
        }
        while (enumerator.Next());
    }
    finally
    {
        enumerator.Close();
    }
    return query;
}
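// Usage sketch (assumes the enclosing type is RangeQuery, as the lowerTerm /
// upperTerm / inclusive fields suggest, and assumes the Lucene 1.4 style
// RangeQuery(Term, Term, bool) constructor; the field and values are hypothetical):
public static Query RangeRewriteSketch(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    Term lower = new Term("date", "20040101");
    Term upper = new Term("date", "20041231");
    RangeQuery range = new RangeQuery(lower, upper, true); // inclusive bounds
    // Rewrite expands the range into a BooleanQuery holding one optional
    // TermQuery per indexed "date" term inside the bounds.
    return range.Rewrite(reader);
}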
internal int[] docMap = null; // maps around deleted docs

internal SegmentMergeInfo(int b, TermEnum te, Monodoc.Lucene.Net.Index.IndexReader r)
{
    base_Renamed = b;
    reader = r;
    termEnum = te;
    term = te.Term();
    postings = reader.TermPositions();

    // build array which maps document numbers around deletions
    if (reader.HasDeletions())
    {
        int maxDoc = reader.MaxDoc();
        docMap = new int[maxDoc];
        int j = 0;
        for (int i = 0; i < maxDoc; i++)
        {
            if (reader.IsDeleted(i))
                docMap[i] = -1;
            else
                docMap[i] = j++;
        }
    }
}
/// <summary> Creates a new <code>WildcardTermEnum</code>. Passing in a
/// {@link Monodoc.Lucene.Net.Index.Term Term} that does not contain a
/// <code>WILDCARD_CHAR</code> will cause an exception to be thrown.
/// </summary>
public WildcardTermEnum(Monodoc.Lucene.Net.Index.IndexReader reader, Term term) : base()
{
    searchTerm = term;
    field = searchTerm.Field();
    text = searchTerm.Text();

    // locate the first wildcard, whichever of the two kinds comes first
    int sidx = text.IndexOf((System.Char) WILDCARD_STRING);
    int cidx = text.IndexOf((System.Char) WILDCARD_CHAR);
    int idx = sidx;
    if (idx == -1)
    {
        idx = cidx;
    }
    else if (cidx >= 0)
    {
        idx = System.Math.Min(idx, cidx);
    }

    // enumerate terms starting from the literal prefix before the first wildcard
    pre = searchTerm.Text().Substring(0, idx);
    preLen = pre.Length;
    text = text.Substring(preLen);
    SetEnum(reader.Terms(new Term(searchTerm.Field(), pre)));
}
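// Usage sketch (field and pattern are hypothetical): enumerate the indexed
// terms matching a wildcard pattern. A FilteredTermEnum is positioned on its
// first candidate after construction, hence the do/while shape, the same one
// the Rewrite() methods above use.
public static void WildcardEnumSketch(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    WildcardTermEnum wildcardEnum = new WildcardTermEnum(reader, new Term("body", "lu*ne"));
    try
    {
        do
        {
            Term t = wildcardEnum.Term();
            if (t == null)
                break;
            System.Console.WriteLine(t.Text());
        }
        while (wildcardEnum.Next());
    }
    finally
    {
        wildcardEnum.Close();
    }
}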
public override Query Rewrite(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    FilteredTermEnum enumerator = GetEnum(reader);
    BooleanQuery query = new BooleanQuery();
    try
    {
        do
        {
            Term t = enumerator.Term();
            if (t != null)
            {
                TermQuery tq = new TermQuery(t); // found a match
                tq.SetBoost(GetBoost() * enumerator.Difference()); // set the boost
                query.Add(tq, false, false); // add to query
            }
        }
        while (enumerator.Next());
    }
    finally
    {
        enumerator.Close();
    }
    return query;
}
public AnonymousClassSpans(Monodoc.Lucene.Net.Index.IndexReader reader, SpanOrQuery enclosingInstance)
{
    InitBlock(reader, enclosingInstance);
}
public override Spans GetSpans(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    if (clauses.Count == 1) // optimize 1-clause case
        return ((SpanQuery) clauses[0]).GetSpans(reader);

    return new AnonymousClassSpans(reader, this);
}
public virtual Explanation Explain(Monodoc.Lucene.Net.Index.IndexReader reader, int doc)
{
    Explanation result = new Explanation();
    result.SetDescription("weight(" + Query + " in " + doc + "), product of:");

    Explanation idfExpl = new Explanation(idf, "idf(" + Query + ")");

    // explain query weight
    Explanation queryExpl = new Explanation();
    queryExpl.SetDescription("queryWeight(" + Query + "), product of:");

    Explanation boostExpl = new Explanation(Enclosing_Instance.GetBoost(), "boost");
    if (Enclosing_Instance.GetBoost() != 1.0f)
        queryExpl.AddDetail(boostExpl);
    queryExpl.AddDetail(idfExpl);

    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);

    queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue());
    result.AddDetail(queryExpl);

    // explain field weight
    Explanation fieldExpl = new Explanation();
    fieldExpl.SetDescription("fieldWeight(" + Query + " in " + doc + "), product of:");

    Explanation tfExpl = Scorer(reader).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);

    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(Enclosing_Instance.field);
    float fieldNorm = fieldNorms != null ? Similarity.DecodeNorm(fieldNorms[doc]) : 0.0f;
    fieldNormExpl.SetValue(fieldNorm);
    fieldNormExpl.SetDescription("fieldNorm(Field=" + Enclosing_Instance.field + ", doc=" + doc + ")");
    fieldExpl.AddDetail(fieldNormExpl);

    fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue());
    result.AddDetail(fieldExpl);

    // combine them
    result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue());

    if (queryExpl.GetValue() == 1.0f)
        return fieldExpl;

    return result;
}
public virtual Scorer Scorer(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    if (Enclosing_Instance.termArrays.Count == 0) // optimize zero-term case
        return null;

    TermPositions[] tps = new TermPositions[Enclosing_Instance.termArrays.Count];
    for (int i = 0; i < tps.Length; i++)
    {
        Term[] terms = (Term[]) Enclosing_Instance.termArrays[i];

        TermPositions p;
        if (terms.Length > 1)
            p = new MultipleTermPositions(reader, terms);
        else
            p = reader.TermPositions(terms[0]);

        if (p == null)
            return null;

        tps[i] = p;
    }

    if (Enclosing_Instance.slop == 0)
        return new ExactPhraseScorer(this, tps, Enclosing_Instance.GetPositions(), Enclosing_Instance.GetSimilarity(searcher), reader.Norms(Enclosing_Instance.field));
    else
        return new SloppyPhraseScorer(this, tps, Enclosing_Instance.GetPositions(), Enclosing_Instance.GetSimilarity(searcher), Enclosing_Instance.slop, reader.Norms(Enclosing_Instance.field));
}
protected internal override FilteredTermEnum GetEnum(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    return new FuzzyTermEnum(reader, GetTerm(), minimumSimilarity, prefixLength);
}
/// <summary>Returns a BitSet with true for documents which should be permitted in
/// search results, and false for those that should not.
/// </summary>
public abstract System.Collections.BitArray Bits(Monodoc.Lucene.Net.Index.IndexReader reader);
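// A minimal concrete Filter (a sketch, not part of this library): permit
// every document that has not been deleted. Real filters, such as the
// QueryFilter and CachingWrapperFilter implementations of Bits() above,
// derive the bits from a query or a per-reader cache instead.
public class AllDocsFilter : Filter
{
    public override System.Collections.BitArray Bits(Monodoc.Lucene.Net.Index.IndexReader reader)
    {
        System.Collections.BitArray bits = new System.Collections.BitArray(reader.MaxDoc());
        for (int i = 0; i < reader.MaxDoc(); i++)
            bits[i] = !reader.IsDeleted(i); // true = document may appear in results
        return bits;
    }
}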
internal AnonymousClassWith1(Monodoc.Lucene.Net.Index.IndexReader enclosingInstance, Lucene.Net.Store.Lock Param1, long Param2) : base(Param1, Param2)
{
    InitBlock(enclosingInstance);
}
private void InitBlock(Monodoc.Lucene.Net.Index.IndexReader enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
}
// inherit javadocs
public virtual ScoreDocComparator NewComparator(Monodoc.Lucene.Net.Index.IndexReader reader, System.String fieldname)
{
    System.String field = String.Intern(fieldname);
    System.IComparable[] cachedValues = Monodoc.Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetCustom(reader, field, this);
    return new AnonymousClassScoreDocComparator(cachedValues, this);
}
/// <summary> Constructor for enumeration of all terms from specified <code>reader</code> which share a prefix of
/// length <code>prefixLength</code> with <code>term</code> and which have a fuzzy similarity &gt;
/// <code>minSimilarity</code>.
/// </summary>
/// <param name="reader">Delivers terms.
/// </param>
/// <param name="term">Pattern term.
/// </param>
/// <param name="minSimilarity">Minimum required similarity for terms from the reader. Default value is 0.5f.
/// </param>
/// <param name="prefixLength">Length of required common prefix. Default value is 0.
/// </param>
/// <throws> IOException </throws>
public FuzzyTermEnum(Monodoc.Lucene.Net.Index.IndexReader reader, Term term, float minSimilarity, int prefixLength) : base()
{
    InitBlock();
    minimumSimilarity = minSimilarity;
    scale_factor = 1.0f / (1.0f - minimumSimilarity);

    searchTerm = term;
    field = searchTerm.Field();
    text = searchTerm.Text();
    textlen = text.Length;
    if (prefixLength > 0 && prefixLength < textlen)
    {
        this.prefixLength = prefixLength;
        prefix = text.Substring(0, prefixLength);
        text = text.Substring(prefixLength);
        textlen = text.Length;
    }
    SetEnum(reader.Terms(new Term(searchTerm.Field(), prefix)));
}
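// Usage sketch (field, text, and thresholds are hypothetical): walk all terms
// whose similarity to "lucene" exceeds 0.7 and which share its first two
// characters. Difference() is the scaled similarity that the Rewrite() method
// shown above folds into each TermQuery's boost.
public static void FuzzyEnumSketch(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    FuzzyTermEnum fuzzyEnum = new FuzzyTermEnum(reader, new Term("body", "lucene"), 0.7f, 2);
    try
    {
        do
        {
            Term t = fuzzyEnum.Term();
            if (t == null)
                break;
            System.Console.WriteLine(t.Text() + " diff=" + fuzzyEnum.Difference());
        }
        while (fuzzyEnum.Next());
    }
    finally
    {
        fuzzyEnum.Close();
    }
}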
public virtual Explanation Explain(Monodoc.Lucene.Net.Index.IndexReader reader, int doc)
{
    Explanation sumExpl = new Explanation();
    sumExpl.SetDescription("sum of:");
    int coord = 0;
    int maxCoord = 0;
    float sum = 0.0f;
    for (int i = 0; i < weights.Count; i++)
    {
        BooleanClause c = (BooleanClause) Enclosing_Instance.clauses[i];
        Weight w = (Weight) weights[i];
        Explanation e = w.Explain(reader, doc);
        if (!c.prohibited)
            maxCoord++;
        if (e.GetValue() > 0)
        {
            if (!c.prohibited)
            {
                sumExpl.AddDetail(e);
                sum += e.GetValue();
                coord++;
            }
            else
            {
                return new Explanation(0.0f, "match prohibited");
            }
        }
        else if (c.required)
        {
            return new Explanation(0.0f, "match required");
        }
    }
    sumExpl.SetValue(sum);

    if (coord == 1) // only one clause matched
        sumExpl = sumExpl.GetDetails()[0]; // eliminate wrapper

    float coordFactor = Enclosing_Instance.GetSimilarity(searcher).Coord(coord, maxCoord);
    if (coordFactor == 1.0f) // coord is no-op
        return sumExpl; // eliminate wrapper
    else
    {
        Explanation result = new Explanation();
        result.SetDescription("product of:");
        result.AddDetail(sumExpl);
        result.AddDetail(new Explanation(coordFactor, "coord(" + coord + "/" + maxCoord + ")"));
        result.SetValue(sum * coordFactor);
        return result;
    }
}
public virtual Explanation Explain(Monodoc.Lucene.Net.Index.IndexReader reader, int doc)
{
    Explanation result = new Explanation();
    result.SetDescription("weight(" + Query + " in " + doc + "), product of:");
    System.String field = ((SpanQuery) Query).GetField();

    // build a space-separated "term=docFreq" list
    System.Text.StringBuilder docFreqs = new System.Text.StringBuilder();
    System.Collections.IEnumerator i = terms.GetEnumerator();
    bool first = true;
    while (i.MoveNext())
    {
        Term term = (Term) i.Current;
        if (!first)
            docFreqs.Append(" ");
        first = false;
        docFreqs.Append(term.Text());
        docFreqs.Append("=");
        docFreqs.Append(searcher.DocFreq(term));
    }

    Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + docFreqs + ")");

    // explain query weight
    Explanation queryExpl = new Explanation();
    queryExpl.SetDescription("queryWeight(" + Query + "), product of:");

    Explanation boostExpl = new Explanation(Query.GetBoost(), "boost");
    if (Query.GetBoost() != 1.0f)
        queryExpl.AddDetail(boostExpl);
    queryExpl.AddDetail(idfExpl);

    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);

    queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue());
    result.AddDetail(queryExpl);

    // explain field weight
    Explanation fieldExpl = new Explanation();
    fieldExpl.SetDescription("fieldWeight(" + field + ":" + query.ToString(field) + " in " + doc + "), product of:");

    Explanation tfExpl = Scorer(reader).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);

    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    float fieldNorm = fieldNorms != null ? Similarity.DecodeNorm(fieldNorms[doc]) : 0.0f;
    fieldNormExpl.SetValue(fieldNorm);
    fieldNormExpl.SetDescription("fieldNorm(Field=" + field + ", doc=" + doc + ")");
    fieldExpl.AddDetail(fieldNormExpl);

    fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue());
    result.AddDetail(fieldExpl);

    // combine them
    result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue());

    if (queryExpl.GetValue() == 1.0f)
        return fieldExpl;

    return result;
}
public virtual Scorer Scorer(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    TermDocs termDocs = reader.TermDocs(Enclosing_Instance.term);

    if (termDocs == null)
        return null;

    return new TermScorer(this, termDocs, Enclosing_Instance.GetSimilarity(searcher), reader.Norms(Enclosing_Instance.term.Field()));
}
public override Spans GetSpans(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    return new AnonymousClassSpans(reader, this);
}
public virtual Scorer Scorer(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    // First see if the (faster) ConjunctionScorer will work.  This can be
    // used when all clauses are required.  Also, at this point a
    // BooleanScorer cannot be embedded in a ConjunctionScorer, as the hits
    // from a BooleanScorer are not always sorted by document number (sigh)
    // and hence BooleanScorer cannot implement skipTo() correctly, which is
    // required by ConjunctionScorer.
    bool allRequired = true;
    bool noneBoolean = true;
    for (int i = 0; i < weights.Count; i++)
    {
        BooleanClause c = (BooleanClause) Enclosing_Instance.clauses[i];
        if (!c.required)
            allRequired = false;
        if (c.query is BooleanQuery)
            noneBoolean = false;
    }

    if (allRequired && noneBoolean)
    {
        // ConjunctionScorer is okay
        ConjunctionScorer result = new ConjunctionScorer(Enclosing_Instance.GetSimilarity(searcher));
        for (int i = 0; i < weights.Count; i++)
        {
            Weight w = (Weight) weights[i];
            Scorer subScorer = w.Scorer(reader);
            if (subScorer == null)
                return null;
            result.Add(subScorer);
        }
        return result;
    }

    // Use good-old BooleanScorer instead.
    BooleanScorer result2 = new BooleanScorer(Enclosing_Instance.GetSimilarity(searcher));
    for (int i = 0; i < weights.Count; i++)
    {
        BooleanClause c = (BooleanClause) Enclosing_Instance.clauses[i];
        Weight w = (Weight) weights[i];
        Scorer subScorer = w.Scorer(reader);
        if (subScorer != null)
            result2.Add(subScorer, c.required, c.prohibited);
        else if (c.required)
            return null;
    }

    return result2;
}
/// <summary> Empty prefix and minSimilarity of 0.5f are used.
/// </summary>
/// <param name="reader">
/// </param>
/// <param name="term">
/// </param>
/// <throws> IOException </throws>
/// <seealso cref="FuzzyTermEnum(Monodoc.Lucene.Net.Index.IndexReader, Term, float, int)">
/// </seealso>
public FuzzyTermEnum(Monodoc.Lucene.Net.Index.IndexReader reader, Term term) : this(reader, term, FuzzyQuery.defaultMinSimilarity, 0)
{
}
public override Query Rewrite(Monodoc.Lucene.Net.Index.IndexReader reader)
{
    if (clauses.Count == 1)
    {
        // optimize 1-clause queries
        BooleanClause c = (BooleanClause) clauses[0];
        if (!c.prohibited)
        {
            // just return clause
            Query query = c.query.Rewrite(reader); // rewrite first

            if (GetBoost() != 1.0f)
            {
                // incorporate boost
                if (query == c.query) // if rewrite was no-op
                    query = (Query) query.Clone(); // then clone before boost
                query.SetBoost(GetBoost() * query.GetBoost());
            }

            return query;
        }
    }

    BooleanQuery clone = null; // recursively rewrite
    for (int i = 0; i < clauses.Count; i++)
    {
        BooleanClause c = (BooleanClause) clauses[i];
        Query query = c.query.Rewrite(reader);
        if (query != c.query)
        {
            // clause rewrote: must clone
            if (clone == null)
                clone = (BooleanQuery) this.Clone();
            clone.clauses[i] = new BooleanClause(query, c.required, c.prohibited);
        }
    }
    if (clone != null)
    {
        return clone; // some clauses rewrote
    }
    else
        return this; // no clauses rewrote
}
/// <summary> This is the standard FuzzyTermEnum with an empty prefix.
/// </summary>
/// <param name="reader">
/// </param>
/// <param name="term">
/// </param>
/// <param name="minSimilarity">
/// </param>
/// <throws> IOException </throws>
/// <seealso cref="FuzzyTermEnum(Monodoc.Lucene.Net.Index.IndexReader, Term, float, int)">
/// </seealso>
public FuzzyTermEnum(Monodoc.Lucene.Net.Index.IndexReader reader, Term term, float minSimilarity) : this(reader, term, minSimilarity, 0)
{
}