/// <summary>
/// Releases the mapped buffers owned by this instance. Clones do not own the
/// buffers, so disposing a clone only marks it disposed.
/// </summary>
/// <param name="disposing">
/// True when called from Dispose(); false when called from a finalizer. The
/// original ignored this flag and touched managed state (CleanMapping on the
/// buffers) even on a finalizer path; managed cleanup now runs only when
/// disposing, matching the standard dispose pattern and the sibling
/// single-buffer overload.
/// </param>
protected override void Dispose(bool disposing)
{
    if (isDisposed)
    {
        return;
    }

    if (disposing)
    {
        // Clones share the enclosing instance's buffers and must not unmap them.
        if (isClone || buffers == null)
        {
            return;
        }

        try
        {
            for (int bufNr = 0; bufNr < buffers.Length; bufNr++)
            {
                // Unmap the buffer (if enabled) and at least unset it for GC.
                try
                {
                    Enclosing_Instance.CleanMapping(buffers[bufNr]);
                }
                finally
                {
                    buffers[bufNr] = null;
                }
            }
        }
        finally
        {
            buffers = null;
        }
    }

    isDisposed = true;
}
/// <summary>
/// Standard dispose pattern for the single mapped buffer. Clones do not own
/// the buffer and therefore skip the unmapping step entirely.
/// </summary>
/// <param name="isDisposing">True when invoked via Dispose(); false from a finalizer.</param>
protected override void Dispose(bool isDisposing)
{
    // Idempotent: a second call is a no-op.
    if (isDisposed)
    {
        return;
    }

    if (isDisposing)
    {
        // Clones share the enclosing instance's buffer and must not unmap it.
        if (isClone || buffer == null)
        {
            return;
        }

        // Unmap the buffer (if enabled) and at least unset it for GC.
        try
        {
            Enclosing_Instance.CleanMapping(buffer);
        }
        finally
        {
            buffer = null;
        }
    }

    isDisposed = true;
}
/// <summary>
/// Builds a phrase scorer over this query's terms: an ExactPhraseScorer when
/// the slop is zero, otherwise a SloppyPhraseScorer. Returns null when the
/// query has no terms, or when any term yields no TermPositions in the reader.
/// </summary>
public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer, IState state)
{
    int termCount = Enclosing_Instance.terms.Count;
    if (termCount == 0)
    {
        // Optimize the zero-term case.
        return null;
    }

    var positions = new TermPositions[termCount];
    for (int i = 0; i < termCount; i++)
    {
        TermPositions tp = reader.TermPositions(Enclosing_Instance.terms[i], state);
        if (tp == null)
        {
            return null;
        }
        positions[i] = tp;
    }

    var norms = reader.Norms(Enclosing_Instance.field, state);
    if (Enclosing_Instance.slop == 0)
    {
        // Optimize the exact (slop == 0) case.
        return new ExactPhraseScorer(this, positions, Enclosing_Instance.GetPositions(), similarity, norms);
    }
    return new SloppyPhraseScorer(this, positions, Enclosing_Instance.GetPositions(), similarity, Enclosing_Instance.slop, norms);
}
/// <summary>
/// Shared constructor body (the Enclosing_Instance wiring comes from the
/// Java-to-C# conversion): captures the reader, term enumerator, field name
/// and enclosing comparator source, then eagerly builds the value cache.
/// </summary>
private void InitBlock(Lucene.Net.Index.IndexReader reader, Lucene.Net.Index.TermEnum enumerator, System.String field, AnonymousClassSortComparatorSource enclosingInstance)
{
    this.reader = reader;
    this.enumerator = enumerator;
    this.field = field;
    this.enclosingInstance = enclosingInstance;
    // NOTE(review): FillCache presumably enumerates all terms of 'field' via
    // the enumerator — confirm; the cache is built once here and reused later.
    cachedValues = Enclosing_Instance.FillCache(reader, enumerator, field);
}
/// <summary>
/// Insert an entry at the current location. The cursor position is not
/// incremented, so the new entry is retrieved by the next MoveNext call.
/// </summary>
/// <param name="key">Key for the new entry.</param>
/// <param name="val">Value for the new entry.</param>
public void Insert(Object key, Object val)
{
    // A cursor that has never advanced sits at -1; step onto the list first.
    if (current == -1)
    {
        MoveNext();
    }
    Enclosing_Instance.Add(current, key, val);
    //System.Console.Out.WriteLine("Added " + key + "-" + val + ", Cursor Position: #" + current);
}
/// <summary>Add an entry at the current location. The new entry goes before
/// the entry that would be returned in the next 'next' call, and
/// that call will not be affected by the insertion.
/// Note: this method is not in the IEnumerator interface.</summary>
public void Add(System.Object val)
{
    // A freshly-created cursor sits before the first element (-1); step onto it.
    if (current == -1)
    {
        MoveNext();
    }

    // Auto-generate a key for this unkeyed entry from the running counter.
    int generatedKey = Enclosing_Instance.unkeyedIndex;
    Enclosing_Instance.unkeyedIndex += 1;

    Enclosing_Instance.Add(current, generatedKey, val);

    // Advance past the newly inserted entry so the next call is unaffected.
    MoveNext();
}
/// <summary>Remove the current entry. Note that remove can
/// be called only after a call to next, and only once per such
/// call. Remove cannot be called after a call to prev.</summary>
public void Remove()
{
    // Valid cursor positions are 0 .. Count-1. The previous bound (<= Count)
    // permitted an index one past the last element, which would be out of
    // range for the zero-based RemoveElement call.
    if (current >= 0 && current < Enclosing_Instance.Count)
    {
        Enclosing_Instance.RemoveElement(current);

        // Step back one so the subsequent MoveNext lands on the entry that
        // followed the removed one. (The outer guard already ensures
        // current >= 0, so no further check is needed.)
        current -= 1;
    }
}
/// <summary>Remove the current entry. Note that remove can
/// be called only after a call to next, and only once per such
/// call. Remove cannot be called after a call to prev.</summary>
/// <exception cref="System.SystemException">
/// Thrown when the cursor is not positioned on an element.
/// </exception>
public void Remove()
{
    if (current == null)
    {
        // Fixed message grammar: was "Removed called in invalid cursor state".
        throw new System.SystemException("Remove called in invalid cursor state");
    }

    // Remember the predecessor before unlinking, then rewind the cursor onto
    // it so iteration continues with the element after the removed one.
    HashedListElement e = current.prev;
    Enclosing_Instance.RemoveElement(current);
    current = e;
    last = current == null ? null : current.prev;
}
/// <summary>The value of the field as a String, or null. If null, the Reader value, /// binary value, or TokenStream value is used. Exactly one of StringValue(), /// ReaderValue(), GetBinaryValue(), and TokenStreamValue() must be set. /// </summary> public override string StringValue(IState state) { Enclosing_Instance.EnsureOpen(); if (internalIsBinary) { return(null); } if (fieldsData == null) { IndexInput localFieldsStream = GetFieldStream(state); try { localFieldsStream.Seek(pointer, state); if (isCompressed) { var b = new byte[toRead]; localFieldsStream.ReadBytes(b, 0, b.Length, state); fieldsData = System.Text.Encoding.GetEncoding("UTF-8").GetString(Enclosing_Instance.Uncompress(b)); } else { if (Enclosing_Instance.format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES) { var bytes = new byte[toRead]; localFieldsStream.ReadBytes(bytes, 0, toRead, state); fieldsData = System.Text.Encoding.GetEncoding("UTF-8").GetString(bytes); } else { //read in chars b/c we already know the length we need to read var chars = new char[toRead]; localFieldsStream.ReadChars(chars, 0, toRead, state); fieldsData = new System.String(chars); } } } catch (System.IO.IOException e) { throw new FieldReaderException(e); } } return((System.String)fieldsData); }
/// <summary>
/// Returns the binary value of the field (lazily loading it from the fields
/// stream on first access), or null for non-binary fields. When
/// <paramref name="result"/> is non-null and large enough, it is reused as the
/// read buffer.
/// </summary>
public override byte[] GetBinaryValue(byte[] result)
{
    Enclosing_Instance.EnsureOpen();

    if (!internalIsBinary)
    {
        return null;
    }

    if (fieldsData == null)
    {
        // Reuse the caller's buffer when it can hold the data; otherwise allocate.
        byte[] buffer = (result == null || result.Length < toRead) ? new byte[toRead] : result;

        IndexInput fieldsStream = GetFieldStream();

        // Throw this IOException since IndexReader.document does so anyway, so probably not that big of a change for people
        // since they are already handling this exception when getting the document
        try
        {
            fieldsStream.Seek(pointer);
            fieldsStream.ReadBytes(buffer, 0, toRead);
            fieldsData = isCompressed ? Enclosing_Instance.Uncompress(buffer) : buffer;
        }
        catch (IOException e)
        {
            throw new FieldReaderException(e);
        }

        internalbinaryOffset = 0;
        internalBinaryLength = toRead;
    }

    return (byte[])fieldsData;
}
/// <summary>
/// Builds a scorer over the query's term arrays: an ExactPhraseScorer when the
/// slop is zero, otherwise a SloppyPhraseScorer. Returns null when there are no
/// term arrays, or when any slot yields no TermPositions.
/// </summary>
public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
{
    int slotCount = Enclosing_Instance.termArrays.Count;
    if (slotCount == 0)
    {
        // Optimize the zero-term case.
        return null;
    }

    var positions = new TermPositions[slotCount];
    for (int i = 0; i < positions.Length; i++)
    {
        Term[] terms = Enclosing_Instance.termArrays[i];

        TermPositions tp;
        if (terms.Length > 1)
        {
            // Several alternative terms in this slot: merge their positions.
            tp = new MultipleTermPositions(reader, terms);
        }
        else
        {
            tp = reader.TermPositions(terms[0]);
        }

        if (tp == null)
        {
            return null;
        }
        positions[i] = tp;
    }

    var norms = reader.Norms(Enclosing_Instance.field);
    if (Enclosing_Instance.slop == 0)
    {
        return new ExactPhraseScorer(this, positions, Enclosing_Instance.GetPositions(), similarity, norms);
    }
    return new SloppyPhraseScorer(this, positions, Enclosing_Instance.GetPositions(), similarity, Enclosing_Instance.slop, norms);
}
/// <summary>
/// Explains the custom score for a document: the sub-query explanation is
/// combined with each value-source explanation by the custom score provider,
/// and the result is scaled by the query boost (the weight value).
/// </summary>
private Explanation DoExplain(IndexReader reader, int doc)
{
    Explanation subQueryExpl = subQueryWeight.Explain(reader, doc);
    if (!subQueryExpl.IsMatch)
    {
        // No match: propagate the sub-query's (non-matching) explanation.
        return subQueryExpl;
    }

    // Document matched — explain every value source as well.
    var valSrcExpls = new Explanation[valSrcWeights.Length];
    for (int i = 0; i < valSrcWeights.Length; i++)
    {
        valSrcExpls[i] = valSrcWeights[i].Explain(reader, doc);
    }

    Explanation customExp = Enclosing_Instance.GetCustomScoreProvider(reader).CustomExplain(doc, subQueryExpl, valSrcExpls);
    float score = Value * customExp.Value;

    Explanation result = new ComplexExplanation(true, score, Enclosing_Instance.ToString() + ", product of:");
    result.AddDetail(customExp);
    // Actually using the q boost as q weight (== weight value).
    result.AddDetail(new Explanation(Value, "queryBoost"));
    return result;
}
/// <summary>
/// Computes and returns the sender-side IP checksum by delegating to the
/// enclosing instance.
/// </summary>
/// <returns>The computed checksum value.</returns>
public virtual int ComputedSenderIPChecksum()
{
    // NOTE(review): the meaning of the 'false' argument is not visible here —
    // presumably it selects the sender variant; confirm against ComputeIPChecksum.
    // Modifier order fixed to the conventional 'public virtual' (was 'virtual public').
    return Enclosing_Instance.ComputeIPChecksum(false);
}
/// <summary>Add a keyed entry at the current location. The new entry is inserted
/// before the entry that would be returned in the next invocation of
/// 'next'. The return value for that call is unaffected.
/// Note: this method is not in the IEnumerator interface.
/// </summary>
/// <param name="key">Key for the new entry.</param>
/// <param name="val">Value for the new entry.</param>
public void Add(System.Object key, System.Object val)
{
    // The element returned by Add was stored in an unused local ('newObj');
    // it is not needed here, so the return value is simply discarded.
    Enclosing_Instance.Add(current, key, val);
    // Advance past the newly inserted entry so the next 'next' call is unaffected.
    MoveNext();
}
/// <summary>
/// Inserts a keyed entry at the current cursor position by delegating to the
/// enclosing instance's Insert.
/// </summary>
/// <param name="key">Key for the new entry.</param>
/// <param name="val">Value for the new entry.</param>
public void Insert(Object key, Object val)
{
    Enclosing_Instance.Insert(current, key, val);
}
/// <summary>
/// Merge-thread entry point: runs the merge this thread was started with, then
/// keeps pulling and executing further merges from the writer until none
/// remain. On exit — normal or exceptional — the thread deregisters itself
/// from the scheduler and wakes any waiters.
/// </summary>
override public void Run()
{
    // First time through the while loop we do the merge
    // that we were started with:
    MergePolicy.OneMerge merge = this.startMerge;

    try
    {
        if (Enclosing_Instance.Verbose())
        {
            Enclosing_Instance.Message(" merge thread: start");
        }

        while (true)
        {
            SetRunningMerge(merge);
            Enclosing_Instance.DoMerge(merge);

            // Subsequent times through the loop we do any new
            // merge that writer says is necessary:
            merge = writer.GetNextMerge();
            if (merge != null)
            {
                writer.MergeInit(merge);
                if (Enclosing_Instance.Verbose())
                {
                    Enclosing_Instance.Message(" merge thread: do another merge " + merge.SegString(Enclosing_Instance.dir));
                }
            }
            else
            {
                // Writer has no more pending merges; this thread is done.
                break;
            }
        }

        if (Enclosing_Instance.Verbose())
        {
            Enclosing_Instance.Message(" merge thread: done");
        }
    }
    catch (System.Exception exc)
    {
        // Ignore the exception if it was due to abort:
        if (!(exc is MergePolicy.MergeAbortedException))
        {
            if (!Enclosing_Instance.suppressExceptions)
            {
                // suppressExceptions is normally only set during
                // testing.
                Lucene.Net.Index.ConcurrentMergeScheduler.anyExceptions = true;
                Enclosing_Instance.HandleMergeException(exc);
            }
        }
    }
    finally
    {
        // Always deregister this thread and wake anyone waiting on the
        // scheduler before the thread exits.
        lock (Enclosing_Instance)
        {
            System.Threading.Monitor.PulseAll(Enclosing_Instance);
            Enclosing_Instance.mergeThreads.Remove(this);
            bool removed = !Enclosing_Instance.mergeThreads.Contains(this);
            System.Diagnostics.Debug.Assert(removed);
        }
    }
}