/// <summary>
/// Remaps an old document ID into the merged document ID space: the ID is
/// first shifted by the accumulated delete count, then run through the
/// outer sort map, and finally through the first merge-state doc map.
/// </summary>
public override int Map(int old)
{
    int shiftedByDeletes = old + (int)deletes.Get(old);
    int sortedDoc = outerInstance.docMap.OldToNew(shiftedByDeletes);
    return mergeState.DocMaps[0].Get(sortedDoc);
}
/// <summary>
/// Returns the mapped value for <paramref name="docID"/>, or <c>-1</c> when
/// the document is not marked live in <c>liveDocs</c>.
/// </summary>
public override int Get(int docID)
{
    return liveDocs.Get(docID) ? (int)docMap.Get(docID) : -1;
}
/// <summary>
/// Computes the old-to-new permutation over the given comparer.
/// Returns <c>null</c> when the documents are already in sorted order,
/// meaning no remapping is required.
/// </summary>
private static Sorter.DocMap Sort(int maxDoc, DocComparer comparer)
{
    // Fast path: if no adjacent pair is out of order, the index is
    // already sorted and no permutation needs to be built.
    bool alreadySorted = true;
    for (int doc = 1; doc < maxDoc; ++doc)
    {
        if (comparer.Compare(doc - 1, doc) > 0)
        {
            alreadySorted = false;
            break;
        }
    }
    if (alreadySorted)
    {
        return null;
    }

    // Sort the document IDs. It can be common to sort a reader, add docs,
    // sort it again, ... and in that case TimSort can save a lot of time.
    int[] permutation = new int[maxDoc];
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        permutation[doc] = doc;
    }
    DocValueSorter docSorter = new DocValueSorter(permutation, comparer);
    docSorter.Sort(0, permutation.Length);

    // permutation now holds the new->old mapping. A monotonic buffer is
    // used because it wastes very little memory on random order but can
    // save a lot of memory when the index is already "almost" sorted.
    MonotonicAppendingInt64Buffer newToOld = new MonotonicAppendingInt64Buffer();
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        newToOld.Add(permutation[doc]);
    }
    newToOld.Freeze();

    // Invert the permutation in place so it becomes the old->new mapping.
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        permutation[(int)newToOld.Get(doc)] = doc;
    }
    MonotonicAppendingInt64Buffer oldToNew = new MonotonicAppendingInt64Buffer();
    for (int doc = 0; doc < maxDoc; ++doc)
    {
        oldToNew.Add(permutation[doc]);
    }
    oldToNew.Freeze();

    return new DocMapAnonymousClass(maxDoc, newToOld, oldToNew);
}
/// <summary>
/// Advances the decoder to <paramref name="targetWordNum"/> (which must be
/// strictly greater than the current <c>wordNum</c>) and then decodes the
/// next word. Small jumps skip within the current sequence; large jumps
/// beyond <c>indexThreshold</c> consult the position index first.
/// </summary>
internal virtual void AdvanceWord(int targetWordNum)
{
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(targetWordNum > wordNum);
    }
    int delta = targetWordNum - wordNum;
    if (delta <= allOnesLength + dirtyLength + 1)
    {
        // Target lies inside the current sequence: skip directly to it.
        SkipDirtyBytes(delta - 1);
    }
    else
    {
        // Target lies past this sequence: drop the remaining dirty bytes.
        SkipDirtyBytes();
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(dirtyLength == 0);
        }
        if (delta > indexThreshold)
        {
            // Large jump: use the index to leap close to the target.
            int i = ForwardBinarySearch(targetWordNum);
            int position = (int)positions.Get(i);
            if (position > @in.Position) // if the binary search returned a backward offset, don't move
            {
                // Restore decoder state to the indexed checkpoint.
                wordNum = (int)wordNums.Get(i) - 1;
                @in.Position = position;
                sequenceNum = i * indexInterval - 1;
            }
        }
        // Scan forward one sequence at a time until the target is in range
        // of the current sequence (or the input is exhausted).
        while (true)
        {
            if (!ReadSequence())
            {
                return;
            }
            delta = targetWordNum - wordNum;
            if (delta <= allOnesLength + dirtyLength + 1)
            {
                if (delta > 1)
                {
                    SkipDirtyBytes(delta - 1);
                }
                break;
            }
            SkipDirtyBytes();
        }
    }
    NextWord();
}
/// <summary>
/// Locates the greatest index entry whose doc ID is &lt;= <paramref name="target"/>
/// by galloping forward (doubling the window at each step) and then binary
/// searching within the bracketed window. Returns the entry's index.
/// </summary>
internal virtual int ForwardBinarySearch(int target)
{
    int indexSize = (int)docIDs.Count;
    int lo = Math.Max(blockIdx / indexInterval, 0);
    int hi = lo + 1;
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(blockIdx == -1 || docIDs.Get(lo) <= docID);
        Debugging.Assert(lo + 1 == docIDs.Count || docIDs.Get(lo + 1) > docID);
    }

    // Gallop: grow the window until it reaches the end of the index or
    // contains an entry at or beyond the target.
    while (true)
    {
        if (hi >= indexSize)
        {
            hi = indexSize - 1;
            break;
        }
        if (docIDs.Get(hi) >= target)
        {
            break;
        }
        int previousLo = lo;
        lo = hi;
        hi += (hi - previousLo) << 1;
    }

    // Binary search within [lo, hi] for the last entry <= target.
    while (lo <= hi)
    {
        int mid = (lo + hi).TripleShift(1);
        int midDocID = (int)docIDs.Get(mid);
        if (midDocID <= target)
        {
            lo = mid + 1;
        }
        else
        {
            hi = mid - 1;
        }
    }

    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(docIDs.Get(hi) <= target);
        Debugging.Assert(hi + 1 == docIDs.Count || docIDs.Get(hi + 1) > target);
    }
    return hi;
}
/// <summary>
/// Given a global ordinal, returns the corresponding ordinal inside the
/// first segment that contains it (the segment identified by
/// <see cref="GetFirstSegmentNumber(long)"/>).
/// </summary>
public virtual long GetFirstSegmentOrd(long globalOrd) => globalOrd - globalOrdDeltas.Get(globalOrd);
/// <summary>
/// Maps a document ID in the sorted (new) order back to its original (old) ID.
/// </summary>
public override int NewToOld(int docID) => (int)newToOld.Get(docID);
/// <summary>
/// Maps an original (old) document ID to its position in the sorted (new) order.
/// </summary>
public override int OldToNew(int docID) => (int)oldToNew.Get(docID);