// start is inclusive; end is exclusive (length = end-start)
public BitsSlice(IBits parent, ReaderSlice slice)
{
    this.parent = parent;
    this.start = slice.Start;
    this.length = slice.Length;
    Debug.Assert(length >= 0, "length=" + length);
}
public TermsEnumWithSlice(int index, ReaderSlice subSlice)
{
    this.SubSlice = subSlice;
    this.Index = index;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(subSlice.Length >= 0, () => "length=" + subSlice.Length);
    }
}
// start is inclusive; end is exclusive (length = end-start)
public BitsSlice(IBits parent, ReaderSlice slice)
{
    this.parent = parent;
    this.start = slice.Start;
    this.length = slice.Length;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(length >= 0, () => "length=" + length);
    }
}
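// The constructor above only records the slice bounds; the actual bit lookup has to
// shift a doc id from the sub-reader's space into the parent's space. A minimal
// sketch of that translation, assuming the IBits.Get(int) member (the bounds check
// below is illustrative; the real BitsSlice.Get is not shown in this section):
public bool Get(int doc)
{
    if (doc < 0 || doc >= length)
    {
        throw new ArgumentOutOfRangeException(nameof(doc), "doc=" + doc + " length=" + length);
    }
    return parent.Get(doc + start); // offset by 'start' into the parent's coordinate space
}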
/// <summary>
/// Creates an ordinal map that allows mapping ords to/from a merged
/// space from <c>subs</c>.
/// </summary>
/// <param name="owner"> a cache key </param>
/// <param name="subs"> <see cref="TermsEnum"/>s that support <see cref="TermsEnum.Ord"/>. They need
/// not be dense (e.g. can be FilteredTermsEnums). </param>
/// <exception cref="IOException"> if an I/O error occurred. </exception>
public OrdinalMap(object owner, TermsEnum[] subs)
{
    // create the ordinal mappings by pulling a termsenum over each sub's
    // unique terms, and walking a multitermsenum over those
    this.owner = owner;
    globalOrdDeltas = new MonotonicAppendingInt64Buffer(PackedInt32s.COMPACT);
    firstSegments = new AppendingPackedInt64Buffer(PackedInt32s.COMPACT);
    ordDeltas = new MonotonicAppendingInt64Buffer[subs.Length];
    for (int i = 0; i < ordDeltas.Length; i++)
    {
        ordDeltas[i] = new MonotonicAppendingInt64Buffer();
    }
    long[] segmentOrds = new long[subs.Length];
    ReaderSlice[] slices = new ReaderSlice[subs.Length];
    TermsEnumIndex[] indexes = new TermsEnumIndex[slices.Length];
    for (int i = 0; i < slices.Length; i++)
    {
        slices[i] = new ReaderSlice(0, 0, i);
        indexes[i] = new TermsEnumIndex(subs[i], i);
    }
    MultiTermsEnum mte = new MultiTermsEnum(slices);
    mte.Reset(indexes);
    long globalOrd = 0;
    while (mte.MoveNext())
    {
        TermsEnumWithSlice[] matches = mte.MatchArray;
        for (int i = 0; i < mte.MatchCount; i++)
        {
            int segmentIndex = matches[i].Index;
            long segmentOrd = matches[i].Terms.Ord;
            long delta = globalOrd - segmentOrd;
            // for each unique term, just mark the first segment index/delta where it occurs
            if (i == 0)
            {
                firstSegments.Add(segmentIndex);
                globalOrdDeltas.Add(delta);
            }
            // for each per-segment ord, map it back to the global term.
            while (segmentOrds[segmentIndex] <= segmentOrd)
            {
                ordDeltas[segmentIndex].Add(delta);
                segmentOrds[segmentIndex]++;
            }
        }
        globalOrd++;
    }
    firstSegments.Freeze();
    globalOrdDeltas.Freeze();
    for (int i = 0; i < ordDeltas.Length; ++i)
    {
        ordDeltas[i].Freeze();
    }
}
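// To make the merge loop above concrete, suppose two segments whose sorted unique
// terms are {a, c} and {b, c}. The MultiTermsEnum visits a, b, c in order, so the
// global ords are a=0, b=1, c=2; segment 0 stores deltas {0, 1} and segment 1 stores
// deltas {1, 1}. A hedged usage sketch: GetGlobalOrd and the helper producing the
// per-segment enums are assumed names, not shown in this section.
object cacheKey = new object();               // any stable "owner" cache key
TermsEnum[] subs = GetPerSegmentTermsEnums(); // hypothetical helper; each enum must support Ord
OrdinalMap map = new OrdinalMap(cacheKey, subs);
// segment 0: a has ord 0 -> global 0, c has ord 1 -> global 2
// segment 1: b has ord 0 -> global 1, c has ord 1 -> global 2
long globalOrdOfC = map.GetGlobalOrd(0, 1); // expected 2 under the assumptions above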
/// <summary>
/// Returns a sub-Bits matching the provided <paramref name="slice"/>.
/// <para/>
/// Because <c>null</c> usually has a special meaning for
/// <see cref="IBits"/> (e.g. no deleted documents), you must check
/// <see cref="SubResult.Matches"/> instead to ensure the sub was
/// actually found.
/// </summary>
public SubResult GetMatchingSub(ReaderSlice slice)
{
    int reader = ReaderUtil.SubIndex(slice.Start, starts);
    Debug.Assert(reader != -1);
    Debug.Assert(reader < subs.Length, "slice=" + slice + " starts[-1]=" + starts[starts.Length - 1]);
    SubResult subResult = new SubResult();
    if (starts[reader] == slice.Start && starts[1 + reader] == slice.Start + slice.Length)
    {
        subResult.Matches = true;
        subResult.Result = subs[reader];
    }
    else
    {
        subResult.Matches = false;
    }
    return subResult;
}
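// Usage sketch: because Result can legitimately be null (e.g. a sub-reader with no
// deleted documents), callers check Matches rather than testing Result for null.
// The receiver 'multiBits' and the 'slice' value are assumed placeholders here.
SubResult sub = multiBits.GetMatchingSub(slice);
if (sub.Matches)
{
    IBits liveDocs = sub.Result; // may still be null: that sub has no deletions
    // ... consult liveDocs for the sub-reader covered by 'slice' ...
}
else
{
    // the slice does not line up exactly with a single sub-reader
}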
public TermsEnumWithSlice(int index, ReaderSlice subSlice)
{
    this.SubSlice = subSlice;
    this.Index = index;
    Debug.Assert(subSlice.Length >= 0, "length=" + subSlice.Length);
}