/// <summary>
/// Maps a doc ID from the pre-delete doc space to the merged-segment doc space:
/// first re-adds the cumulative delete offset, then applies the sorter's
/// old-to-new permutation, and finally the merge state's first doc map.
/// </summary>
public override int Map(int old)
{
    // Re-insert the deleted-doc offset to recover the doc ID before deletes were applied.
    int withDeletes = old + (int)deletes.Get(old);
    // Permute into sorted order, then map through the merge-state doc map.
    int sortedDoc = outerInstance.docMap.OldToNew(withDeletes);
    return mergeState.DocMaps[0].Get(sortedDoc);
}
/// <summary>
/// Returns the mapped doc ID for <paramref name="docID"/>, or -1 when the
/// document is not live (i.e. it has been deleted).
/// </summary>
public override int Get(int docID)
{
    // Deleted documents have no target in the map.
    return LiveDocs.Get(docID) ? (int)DocMap.Get(docID) : -1;
}
/// <summary>
/// Computes the old-to-new permutation over the given comparator.
/// Returns <c>null</c> when the index is already in sorted order, so callers
/// can skip the (potentially expensive) permutation entirely.
/// </summary>
private static Sorter.DocMap Sort(int maxDoc, DocComparator comparator)
{
    // Fast path: if no adjacent pair is out of order, the index is sorted.
    bool alreadySorted = true;
    for (int doc = 1; doc < maxDoc; ++doc)
    {
        if (comparator.Compare(doc - 1, doc) > 0)
        {
            alreadySorted = false;
            break;
        }
    }
    if (alreadySorted)
    {
        return null;
    }

    // Sort the identity permutation of doc IDs by the comparator.
    int[] perm = new int[maxDoc];
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        perm[doc] = doc;
    }
    DocValueSorter docSorter = new DocValueSorter(perm, comparator);
    // It can be common to sort a reader, add docs, sort it again, ... and in
    // that case timSort can save a lot of time.
    docSorter.Sort(0, perm.Length);

    // perm now holds the new-to-old mapping.
    // MonotonicAppendingLongBuffer wastes very little memory when the order is
    // random, but saves a lot when the index is already "almost" sorted.
    MonotonicAppendingLongBuffer newToOld = new MonotonicAppendingLongBuffer();
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        newToOld.Add(perm[doc]);
    }
    newToOld.Freeze();

    // Invert the permutation in place: perm becomes the old-to-new mapping.
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        perm[(int)newToOld.Get(doc)] = doc;
    }
    MonotonicAppendingLongBuffer oldToNew = new MonotonicAppendingLongBuffer();
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        oldToNew.Add(perm[doc]);
    }
    oldToNew.Freeze();

    return new DocMapAnonymousInnerClassHelper(maxDoc, newToOld, oldToNew);
}
/// <summary>
/// Advances this iterator to the word at <paramref name="targetWordNum"/>,
/// which must be strictly greater than the current <c>WordNum</c>. For large
/// jumps it consults the position index (see <c>ForwardBinarySearch</c>) to
/// reposition the underlying input before decoding sequences.
/// </summary>
internal virtual void AdvanceWord(int targetWordNum)
{
    Debug.Assert(targetWordNum > WordNum);
    int delta = targetWordNum - WordNum;
    if (delta <= AllOnesLength + DirtyLength + 1)
    {
        // Target lies within the current sequence: skip directly to it.
        SkipDirtyBytes(delta - 1);
    }
    else
    {
        // Consume the remainder of the current sequence first.
        SkipDirtyBytes();
        Debug.Assert(DirtyLength == 0);
        if (delta > IndexThreshold_Renamed)
        {
            // use the index
            int i = ForwardBinarySearch(targetWordNum);
            int position = (int)Positions.Get(i);
            if (position > @in.Position) // if the binary search returned a backward offset, don't move
            {
                // Reposition state one step before the indexed entry; the loop
                // below decodes forward from there.
                WordNum = (int)WordNums.Get(i) - 1;
                @in.Position = position;
                SequenceNum = i * IndexInterval - 1;
            }
        }
        // Decode sequences until the target word falls inside the current one.
        while (true)
        {
            if (!ReadSequence())
            {
                return; // stream exhausted before reaching the target
            }
            delta = targetWordNum - WordNum;
            if (delta <= AllOnesLength + DirtyLength + 1)
            {
                if (delta > 1)
                {
                    SkipDirtyBytes(delta - 1);
                }
                break;
            }
            SkipDirtyBytes();
        }
    }
    NextWord();
}
/// <summary>
/// Returns the greatest index <c>i</c> in the doc-ID index such that
/// <c>DocIDs.Get(i) &lt;= target</c>. Gallops forward from the current block
/// (doubling the window each step) and then binary-searches the window found.
/// </summary>
internal virtual int ForwardBinarySearch(int target)
{
    int size = (int)DocIDs.Size();
    int low = Math.Max(BlockIdx / IndexInterval, 0);
    int high = low + 1;
    Debug.Assert(BlockIdx == -1 || DocIDs.Get(low) <= DocID_Renamed);
    Debug.Assert(low + 1 == DocIDs.Size() || DocIDs.Get(low + 1) > DocID_Renamed);

    // Gallop: double the window until it covers the target or hits the end.
    while (true)
    {
        if (high >= size)
        {
            high = size - 1;
            break;
        }
        if (DocIDs.Get(high) >= target)
        {
            break;
        }
        int previousLow = low;
        low = high;
        high += (high - previousLow) << 1;
    }

    // Binary search for the last entry <= target inside [low, high].
    while (low <= high)
    {
        int mid = low + ((high - low) >> 1);
        int midDocID = (int)DocIDs.Get(mid);
        if (midDocID <= target)
        {
            low = mid + 1;
        }
        else
        {
            high = mid - 1;
        }
    }

    Debug.Assert(DocIDs.Get(high) <= target);
    Debug.Assert(high + 1 == DocIDs.Size() || DocIDs.Get(high + 1) > target);
    return high;
}
/// <summary>
/// Returns the original (pre-sort) doc ID for the given sorted doc ID.
/// </summary>
public override int NewToOld(int docID)
{
    long oldDoc = newToOld.Get(docID);
    return (int)oldDoc;
}
/// <summary>
/// Returns the sorted (post-sort) doc ID for the given original doc ID.
/// </summary>
public override int OldToNew(int docID)
{
    long newDoc = oldToNew.Get(docID);
    return (int)newDoc;
}
/// <summary>
/// Given a global ordinal, returns the ordinal of the first segment which
/// contains this ordinal (the segment returned by
/// <seealso cref="#getFirstSegmentNumber"/>).
/// </summary>
public virtual long GetFirstSegmentOrd(long globalOrd)
{
    // The per-ordinal delta converts a global ordinal back to a segment ordinal.
    long delta = GlobalOrdDeltas.Get(globalOrd);
    return globalOrd - delta;
}