/// <summary>
/// Runs a combined fuzzy/exact/prefix search for <paramref name="term"/> over
/// the "hottext", "text" and "examples" fields and records the result.
/// </summary>
/// <param name="term">User-entered search term; lower-cased before querying.</param>
/// <param name="count">Maximum number of hits to retrieve.</param>
/// <param name="start">Offset of the first hit (for paging).</param>
/// <returns>The populated <see cref="Result"/>, or null when the index cannot be read.</returns>
public Result Search (string term, int count, int start)
{
	try {
		// Index terms are machine keys, not display text: use the invariant
		// culture so the query is not broken by locale casing rules such as
		// the Turkish dotless-i (CA1304).
		term = term.ToLowerInvariant ();

		Term htTerm = new Term ("hottext", term);

		// "hottext" matches three ways: fuzzy (typo tolerance), exact and
		// prefix, with exact/prefix boosted well above fuzzy.
		Query qq1 = new FuzzyQuery (htTerm);
		Query qq2 = new TermQuery (htTerm);
		qq2.SetBoost (10f);
		Query qq3 = new PrefixQuery (htTerm);
		qq3.SetBoost (10f);
		DisjunctionMaxQuery q1 = new DisjunctionMaxQuery (0f);
		q1.Add (qq1);
		q1.Add (qq2);
		q1.Add (qq3);

		// Full-text and example matches carry a smaller boost.
		Query q2 = new TermQuery (new Term ("text", term));
		q2.SetBoost (3f);
		Query q3 = new TermQuery (new Term ("examples", term));
		q3.SetBoost (3f);

		// DisjunctionMax scores a document by its best field match rather
		// than summing overlapping matches (tie-breaker 0).
		DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);
		q.Add (q1);
		q.Add (q2);
		q.Add (q3);

		TopDocs top = SearchInternal (q, count, start);
		Result r = new Result (term, searcher, top.ScoreDocs);
		Results.Add (r);
		return r;
	} catch (IOException) {
		// Best effort: a missing or unreadable index is reported, not fatal.
		Console.WriteLine ("No index in {0}", dir);
		return null;
	}
}
// Lazily-built mapping from this segment's doc numbers to merged doc
// numbers; access it through getDocMap() rather than reading the field.
private int[] docMap; // use getDocMap()

/// <summary>
/// Bookkeeping for one segment taking part in a term merge.
/// </summary>
/// <param name="b">Doc-number base of this segment within the merged index.</param>
/// <param name="te">Term enumerator positioned over this segment's terms.</param>
/// <param name="r">Reader for the segment being merged.</param>
internal SegmentMergeInfo(int b, TermEnum te, IndexReader r)
{
	base_Renamed = b;
	reader = r;
	termEnum = te;
	// Cache the enumerator's current term so comparisons don't need
	// to call Term() repeatedly.
	term = te.Term();
}
/// <summary>
/// Reads the next term from the dictionary stream into this buffer.
/// Terms are delta-encoded: the first VInt is the number of characters
/// shared with the previous term, the second is the length of the new
/// suffix that follows.
/// </summary>
/// <param name="input">Stream positioned at a term entry.</param>
/// <param name="fieldInfos">Resolves the trailing field number to a field name.</param>
public void Read(IndexInput input, FieldInfos fieldInfos)
{
	this.term = null; // invalidate cache
	int start = input.ReadVInt();   // chars/bytes reused from the previous term
	int length = input.ReadVInt();  // length of the fresh suffix
	int totalLength = start + length;
	if (preUTF8Strings)
	{
		// Legacy format: term text stored directly as UTF-16 chars,
		// so the suffix can be read straight into the char buffer.
		text.SetLength(totalLength);
		input.ReadChars(text.result, start, length);
	}
	else
	{
		if (dirty)
		{
			// Fully convert all bytes since bytes is dirty:
			// re-derive the UTF-8 mirror from the current chars so the
			// shared prefix bytes are valid before appending the suffix.
			UnicodeUtil.UTF16toUTF8(text.result, 0, text.length, bytes);
			bytes.SetLength(totalLength);
			input.ReadBytes(bytes.result, start, length);
			UnicodeUtil.UTF8toUTF16(bytes.result, 0, totalLength, text);
			dirty = false;
		}
		else
		{
			// Incrementally convert only the UTF8 bytes that are new:
			bytes.SetLength(totalLength);
			input.ReadBytes(bytes.result, start, length);
			UnicodeUtil.UTF8toUTF16(bytes.result, start, length, text);
		}
	}
	this.field = fieldInfos.FieldName(input.ReadVInt());
}
/// <summary>
/// Advances the enumeration until the current term is greater than or
/// equal to <paramref name="target"/>.
/// </summary>
/// <returns>True when such a term was reached; false when the enumeration ran out.</returns>
public virtual bool SkipTo(Term target)
{
	for (; ; )
	{
		if (!Next())
			return false;
		if (target.CompareTo(Term()) <= 0)
			return true;
	}
}
/// <summary>
/// Positions this enumerator. Only a null term (rewind to before the
/// first document) is supported; any other target throws.
/// </summary>
/// <exception cref="NotSupportedException">When <paramref name="term"/> is non-null.</exception>
public void Seek(Term term)
{
	if (term != null)
		throw new NotSupportedException();
	doc = -1;
}
/// <summary>
/// Seeks to a term and clears all lazily-loaded position/payload state
/// so the next read starts from the new term.
/// </summary>
internal override void Seek(TermInfo ti, Term term)
{
	base.Seek(ti, term);
	// Record where this term's position data lives; the actual file
	// seek is deferred until positions are first requested.
	if (ti != null)
		lazySkipPointer = ti.proxPointer;
	// Reset per-term counters and payload bookkeeping.
	proxCount = 0;
	lazySkipProxCount = 0;
	payloadLength = 0;
	needToLoadPayload = false;
}
/// <summary>
/// Advances the wrapped enumerator, mirroring its current term into
/// <c>term</c> (null once exhausted).
/// </summary>
/// <returns>True while more terms remain.</returns>
internal bool Next()
{
	bool moved = termEnum.Next();
	term = moved ? termEnum.Term() : null;
	return moved;
}
/// <summary>
/// Returns the offset of the greatest index entry which is less than or
/// equal to <paramref name="term"/> (-1 when the term sorts before every entry).
/// </summary>
private int GetIndexOffset(Term term)
{
	// Standard binary search over the sorted indexTerms[] array.
	int low = 0;
	int high = indexTerms.Length - 1;
	while (low <= high)
	{
		// Unsigned shift guards against (low + high) overflowing int.
		int mid = SupportClass.Number.URShift(low + high, 1);
		int cmp = term.CompareTo(indexTerms[mid]);
		if (cmp == 0)
			return mid;
		if (cmp < 0)
			high = mid - 1;
		else
			low = mid + 1;
	}
	// Not found: high now sits just before the insertion point, i.e. the
	// greatest entry <= term.
	return high;
}
/// <summary>Returns the TermInfo for a Term in the set, or null. </summary>
// Convenience overload: the common cached lookup; the two-argument
// overload does the real work.
internal TermInfo Get(Term term)
{
	return Get(term, true);
}
/// <summary>Returns the TermInfo for a Term in the set, or null. </summary>
/// <param name="term">Term to look up.</param>
/// <param name="useCache">Whether to consult/populate the per-thread
/// TermInfo cache; bulk scans pass false so they don't flush it.</param>
private TermInfo Get(Term term, bool useCache)
{
	// An empty dictionary can never contain the term.
	if (size == 0)
	{
		return(null);
	}

	EnsureIndexIsRead();
	TermInfo ti;
	ThreadResources resources = GetThreadResources();
	Mono.Lucene.Net.Util.Cache.Cache cache = null;

	if (useCache)
	{
		cache = resources.termInfoCache;
		// check the cache first if the term was recently looked up
		ti = (TermInfo)cache.Get(term);
		if (ti != null)
		{
			return(ti);
		}
	}

	// optimize sequential access: first try scanning cached enum w/o seeking
	// (applies when the target lies at or after the enum's current position)
	SegmentTermEnum enumerator = resources.termEnum;
	if (enumerator.Term() != null && ((enumerator.Prev() != null && term.CompareTo(enumerator.Prev()) > 0) || term.CompareTo(enumerator.Term()) >= 0))
	{
		// Only scan while the target stays within the current index block;
		// past the next index entry a seek would be cheaper.
		int enumOffset = (int)(enumerator.position / totalIndexInterval) + 1;
		if (indexTerms.Length == enumOffset || term.CompareTo(indexTerms[enumOffset]) < 0)
		{
			// no need to seek
			int numScans = enumerator.ScanTo(term);
			if (enumerator.Term() != null && term.CompareTo(enumerator.Term()) == 0)
			{
				ti = enumerator.TermInfo();
				if (cache != null && numScans > 1)
				{
					// we only want to put this TermInfo into the cache if
					// scanEnum skipped more than one dictionary entry.
					// This prevents RangeQueries or WildcardQueries to
					// wipe out the cache when they iterate over a large numbers
					// of terms in order
					cache.Put(term, ti);
				}
			}
			else
			{
				ti = null;
			}
			return(ti);
		}
	}

	// random-access: must seek
	SeekEnum(enumerator, GetIndexOffset(term));
	enumerator.ScanTo(term);
	if (enumerator.Term() != null && term.CompareTo(enumerator.Term()) == 0)
	{
		ti = enumerator.TermInfo();
		if (cache != null)
		{
			cache.Put(term, ti);
		}
	}
	else
	{
		ti = null;
	}
	return(ti);
}
/// <summary>
/// Seeks to a term and resets the deleted-document counter, which is
/// tracked per seek position.
/// </summary>
public override void Seek(Term term)
{
	base.Seek(term);
	delCount = 0;
}
/// <summary>
/// Returns a term enumerator starting at or after <paramref name="term"/>,
/// merged across all sub-readers.
/// </summary>
public override TermEnum Terms(Term term)
{
	EnsureOpen();
	MultiTermEnum merged = new MultiTermEnum(this, subReaders, starts, term);
	return merged;
}
/// <summary>
/// Returns a TermDocs enumerator for <paramref name="term"/>, delegating
/// to the wrapped reader.
/// </summary>
public override TermDocs TermDocs(Term term)
{
	EnsureOpen();
	TermDocs docs = in_Renamed.TermDocs(term);
	return docs;
}
/// <summary>
/// Creates an enumerator over the single parallel reader that owns the
/// term's field, positioned at <paramref name="term"/>.
/// </summary>
public ParallelTermEnum(ParallelReader enclosingInstance, Term term)
{
	InitBlock(enclosingInstance);
	field = term.Field();
	IndexReader fieldReader = (IndexReader) Enclosing_Instance.fieldToReader[field];
	// An unknown field leaves termEnum null, i.e. an empty enumeration.
	if (fieldReader != null)
	{
		termEnum = fieldReader.Terms(term);
	}
}
/// <summary>Returns an enumeration of terms starting at or after the named term. </summary>
public SegmentTermEnum Terms(Term term)
{
	// Deliberately bypass the cache: the Get() call's side effect is to
	// reposition this thread's enumerator at the term.
	Get(term, false);
	SegmentTermEnum positioned = GetThreadResources().termEnum;
	// Hand back a clone so the caller's iteration doesn't disturb the
	// thread-local enumerator.
	return (SegmentTermEnum) positioned.Clone();
}
/// <summary>
/// Returns a TermDocs enumerator that spans this reader's parallel sub-readers.
/// </summary>
public override TermDocs TermDocs(Term term)
{
	EnsureOpen();
	ParallelTermDocs docs = new ParallelTermDocs(this, term);
	return docs;
}
/// <summary>
/// Returns a TermPositions enumerator that spans this reader's parallel sub-readers.
/// </summary>
public override TermPositions TermPositions(Term term)
{
	EnsureOpen();
	ParallelTermPositions positions = new ParallelTermPositions(this, term);
	return positions;
}
/// <summary>
/// Returns the document frequency of <paramref name="term"/> from the
/// parallel reader that owns its field, or 0 when no reader covers the field.
/// </summary>
public override int DocFreq(Term term)
{
	EnsureOpen();
	IndexReader reader = (IndexReader) fieldToReader[term.Field()];
	if (reader == null)
		return 0;
	return reader.DocFreq(term);
}
/// <summary>
/// Returns a term enumerator starting at or after <paramref name="term"/>,
/// backed by the parallel reader that owns the term's field.
/// </summary>
public override TermEnum Terms(Term term)
{
	EnsureOpen();
	ParallelTermEnum result = new ParallelTermEnum(this, term);
	return result;
}
/// <summary>
/// Returns the document frequency of <paramref name="t"/> from this
/// segment's term dictionary, or 0 when the term is absent.
/// </summary>
public override int DocFreq(Term t)
{
	EnsureOpen();
	TermInfo ti = core.GetTermsReader().Get(t);
	return ti == null ? 0 : ti.docFreq;
}
/// <summary>Returns the TermInfo for a Term in the set, or null. </summary>
/// <param name="term">Term to look up.</param>
/// <param name="useCache">Whether to consult/populate the per-thread
/// TermInfo cache; bulk scans pass false so they don't flush it.</param>
private TermInfo Get(Term term, bool useCache)
{
	// An empty dictionary can never contain the term.
	if (size == 0)
		return null;

	EnsureIndexIsRead();
	TermInfo ti;
	ThreadResources resources = GetThreadResources();
	Mono.Lucene.Net.Util.Cache.Cache cache = null;

	if (useCache)
	{
		cache = resources.termInfoCache;
		// check the cache first if the term was recently looked up
		ti = (TermInfo) cache.Get(term);
		if (ti != null)
		{
			return ti;
		}
	}

	// optimize sequential access: first try scanning cached enum w/o seeking
	// (applies when the target lies at or after the enum's current position)
	SegmentTermEnum enumerator = resources.termEnum;
	if (enumerator.Term() != null && ((enumerator.Prev() != null && term.CompareTo(enumerator.Prev()) > 0) || term.CompareTo(enumerator.Term()) >= 0))
	{
		// Only scan while the target stays within the current index block;
		// past the next index entry a seek would be cheaper.
		int enumOffset = (int) (enumerator.position / totalIndexInterval) + 1;
		if (indexTerms.Length == enumOffset || term.CompareTo(indexTerms[enumOffset]) < 0)
		{
			// no need to seek
			int numScans = enumerator.ScanTo(term);
			if (enumerator.Term() != null && term.CompareTo(enumerator.Term()) == 0)
			{
				ti = enumerator.TermInfo();
				if (cache != null && numScans > 1)
				{
					// we only want to put this TermInfo into the cache if
					// scanEnum skipped more than one dictionary entry.
					// This prevents RangeQueries or WildcardQueries to
					// wipe out the cache when they iterate over a large numbers
					// of terms in order
					cache.Put(term, ti);
				}
			}
			else
			{
				ti = null;
			}
			return ti;
		}
	}

	// random-access: must seek
	SeekEnum(enumerator, GetIndexOffset(term));
	enumerator.ScanTo(term);
	if (enumerator.Term() != null && term.CompareTo(enumerator.Term()) == 0)
	{
		ti = enumerator.TermInfo();
		if (cache != null)
		{
			cache.Put(term, ti);
		}
	}
	else
	{
		ti = null;
	}
	return ti;
}
/// <summary>Delegates the seek to the wrapped TermDocs enumerator.</summary>
public virtual void Seek(Term term)
{
	in_Renamed.Seek(term);
}
/// <summary>Returns the position of a Term in the set or -1. </summary>
internal long GetPosition(Term term)
{
	// An empty dictionary can never contain the term.
	if (size == 0)
		return - 1;

	EnsureIndexIsRead();
	// Jump to the nearest index entry at or before the term, then scan
	// forward linearly.
	int indexOffset = GetIndexOffset(term);
	SegmentTermEnum enumerator = GetThreadResources().termEnum;
	SeekEnum(enumerator, indexOffset);

	// Advance while the enumerator's term still sorts before the target.
	while (term.CompareTo(enumerator.Term()) > 0 && enumerator.Next())
	{
	}

	// Only an exact match yields a position.
	if (term.CompareTo(enumerator.Term()) == 0)
		return enumerator.position;
	else
		return - 1;
}
/// <summary>Copies another buffer's state into this one.</summary>
public void Set(TermBuffer other)
{
	text.CopyText(other.text);
	// Mark the cached byte form stale: it no longer matches the copied chars.
	dirty = true;
	field = other.field;
	// Reuse the other buffer's materialized Term, if any.
	term = other.term;
}
/// <summary>Adds a term and its TermInfo to the writer's output.</summary>
internal void Add(Term term, TermInfo ti)
{
	// Convert the term text to UTF-8 once, then hand the field number
	// and raw bytes to the low-level overload.
	UnicodeUtil.UTF16toUTF8(term.text, 0, term.text.Length, utf8Result);
	Add(fieldInfos.FieldNumber(term.field), utf8Result.result, utf8Result.length, ti);
}
/// <summary>
/// Materializes this buffer as a Term, caching the result. Returns null
/// when the buffer has no field set (unset state).
/// </summary>
public Term ToTerm()
{
	if (field == null)
	{
		// unset buffer
		return null;
	}
	if (term == null)
	{
		string termText = new System.String(text.result, 0, text.length);
		// 'false' selects the non-interning Term constructor.
		term = new Term(field, termText, false);
	}
	return term;
}
/// <summary>
/// Returns the document frequency of <paramref name="t"/>, summed over
/// every segment reader.
/// </summary>
public override int DocFreq(Term t)
{
	EnsureOpen();
	// sum freqs in segments (order does not matter for a sum)
	int total = 0;
	int i = subReaders.Length;
	while (i-- > 0)
	{
		total += subReaders[i].DocFreq(t);
	}
	return total;
}
/// <summary>
/// Returns the document frequency of <paramref name="t"/>, delegating to
/// the wrapped reader.
/// </summary>
public override int DocFreq(Term t)
{
	EnsureOpen();
	int freq = in_Renamed.DocFreq(t);
	return freq;
}
/// <summary>
/// Creates a TermDocs enumerator over the parallel readers. A null term
/// enumerates all documents (using the first reader, since the parallel
/// readers share the same doc numbering); otherwise seeks to the term.
/// </summary>
public ParallelTermDocs(ParallelReader enclosingInstance, Term term)
{
	InitBlock(enclosingInstance);
	if (term != null)
	{
		Seek(term);
	}
	else if (Enclosing_Instance.readers.Count == 0)
	{
		// No readers at all: nothing to enumerate.
		termDocs = null;
	}
	else
	{
		termDocs = ((IndexReader) Enclosing_Instance.readers[0]).TermDocs(null);
	}
}
/// <summary>Returns the TermInfo for a Term in the set, or null. </summary>
// Convenience overload: the common cached lookup; the two-argument
// overload does the real work.
internal TermInfo Get(Term term)
{
	return(Get(term, true));
}
/// <summary>
/// Loads this buffer from an existing Term; a null term resets the
/// buffer to its unset state.
/// </summary>
public void Set(Term term)
{
	if (term == null)
	{
		Reset();
		return;
	}
	string termText = term.Text();
	int len = termText.Length;
	text.SetLength(len);
	SupportClass.TextSupport.GetCharsFromString(termText, 0, len, text.result, 0);
	// The byte mirror no longer matches the freshly copied chars.
	dirty = true;
	field = term.Field();
	// Keep the source Term so ToTerm() can return it without rebuilding.
	this.term = term;
}
/// <summary>
/// Seeks to <paramref name="term"/> on the parallel reader that owns its
/// field; leaves termDocs null when no reader covers the field.
/// </summary>
public virtual void Seek(Term term)
{
	IndexReader reader = (IndexReader) Enclosing_Instance.fieldToReader[term.Field()];
	if (reader == null)
	{
		termDocs = null;
	}
	else
	{
		termDocs = reader.TermDocs(term);
	}
}
/// <summary>Clears the buffer back to its unset state.</summary>
public void Reset()
{
	field = null;
	text.SetLength(0);
	term = null;
	// The byte mirror is stale relative to the now-empty text.
	dirty = true;
}
/// <summary>
/// Creates a positions enumerator over the parallel readers, immediately
/// positioned at <paramref name="term"/>.
/// </summary>
public ParallelTermPositions(ParallelReader enclosingInstance, Term term):base(enclosingInstance)
{
	InitBlock(enclosingInstance);
	Seek(term);
}
/// <summary>
/// Seeks to <paramref name="term"/> on the parallel reader that owns its
/// field, requesting positions; leaves termDocs null when no reader
/// covers the field.
/// </summary>
public override void Seek(Term term)
{
	IndexReader reader = (IndexReader) Enclosing_Instance.fieldToReader[term.Field()];
	if (reader == null)
	{
		termDocs = null;
	}
	else
	{
		termDocs = reader.TermPositions(term);
	}
}
/// <summary>
/// Returns a term enumerator starting at or after <paramref name="t"/>,
/// delegating to the wrapped reader.
/// </summary>
public override TermEnum Terms(Term t)
{
	EnsureOpen();
	TermEnum terms = in_Renamed.Terms(t);
	return terms;
}