/// <summary>
/// One-time setup: sorts the shared <c>reader</c> by the integer id stored in
/// <c>NUMERIC_DV_FIELD</c>, records the expected sorted values into
/// <c>sortedValues</c>, then replaces <c>reader</c> with a
/// <see cref="SortingAtomicReader"/> wrapper and sanity-checks it.
/// </summary>
public static void BeforeClassSortingAtomicReaderTest()
{
    // Sort the index by id (as integer, stored in NUMERIC_DV_FIELD).
    Sort sort = new Sort(new SortField(NUMERIC_DV_FIELD, SortField.Type_e.INT));
    Sorter.DocMap docMap = new Sorter(sort).Sort(reader);

    // Sorter.Sort also sorts the values, so capture the expected per-doc
    // values in their new (sorted) positions before wrapping the reader.
    NumericDocValues dv = reader.GetNumericDocValues(NUMERIC_DV_FIELD);
    sortedValues = new int[reader.MaxDoc];
    for (int i = 0; i < reader.MaxDoc; ++i)
    {
        sortedValues[docMap.OldToNew(i)] = (int)dv.Get(i);
    }
    if (VERBOSE)
    {
        Console.WriteLine("docMap: " + docMap);
        Console.WriteLine("sortedValues: " + Arrays.ToString(sortedValues));
    }

    // Wrap the reader so it exposes its documents in sorted order.
    reader = SortingAtomicReader.Wrap(reader, sort);

    if (VERBOSE)
    {
        Console.WriteLine("mapped-deleted-docs: ");
        Bits mappedLiveDocs = reader.LiveDocs;
        // BUGFIX: LiveDocs is null when the reader has no deletions; guard
        // against a NullReferenceException in that case.
        if (mappedLiveDocs != null)
        {
            for (int i = 0; i < mappedLiveDocs.Length(); i++)
            {
                if (!mappedLiveDocs.Get(i))
                {
                    // BUGFIX: use Write, not WriteLine — the appended " "
                    // separator and the bare WriteLine() after the loop show
                    // the deleted ids were meant to share a single line.
                    Console.Write(i + " ");
                }
            }
        }
        Console.WriteLine();
    }

    TestUtil.CheckReader(reader);
}
/// <summary>
/// Wraps <paramref name="in"/>, eagerly consuming it: every doc id is remapped
/// through <paramref name="docMap"/>, its positions are serialized into an
/// in-memory scratch file, and the (doc, file-offset) pairs are sorted so the
/// enum can later be replayed in the new document order.
/// </summary>
internal SortingDocsAndPositionsEnum(int maxDoc, SortingDocsAndPositionsEnum reuse, DocsAndPositionsEnum @in, Sorter.DocMap docMap, bool storeOffsets)
    : base(@in)
{
    this.maxDoc = maxDoc;
    this.storeOffsets = storeOffsets;

    // Recycle the buffers of a previous instance when one is offered; the
    // sorter itself is only reusable if it was sized for the same maxDoc.
    if (reuse == null)
    {
        docs = new int[32];
        offsets = new long[32];
        payload = new BytesRef(32);
        file = new RAMFile();
        sorter = new DocOffsetSorter(maxDoc);
    }
    else
    {
        docs = reuse.docs;
        offsets = reuse.offsets;
        payload = reuse.payload;
        file = reuse.file;
        sorter = reuse.maxDoc == maxDoc ? reuse.sorter : new DocOffsetSorter(maxDoc);
    }

    // Drain the wrapped enum: remap each doc id, remember where its position
    // data begins in the scratch file, and write the positions out.
    using (IndexOutput output = new RAMOutputStream(file))
    {
        int count = 0;
        for (int docId = @in.NextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = @in.NextDoc())
        {
            if (count == docs.Length)
            {
                int grown = ArrayUtil.Oversize(count + 1, 4);
                docs = Arrays.CopyOf(docs, grown);
                offsets = Arrays.CopyOf(offsets, grown);
            }
            docs[count] = docMap.OldToNew(docId);
            offsets[count] = output.FilePointer;
            AddPositions(@in, output);
            count++;
        }
        upto = count;
        // Co-sort docs and offsets by the remapped doc id.
        sorter.Reset(docs, offsets);
        sorter.Sort(0, upto);
    }
    this.postingInput = new RAMInputStream("", file);
}
/// <summary>
/// Wraps <paramref name="in"/>, eagerly consuming it: doc ids are remapped
/// through <paramref name="docMap"/> (freqs collected alongside when
/// <paramref name="withFreqs"/> is set) and then sorted into the new order.
/// </summary>
internal SortingDocsEnum(int maxDoc, SortingDocsEnum reuse, DocsEnum @in, bool withFreqs, Sorter.DocMap docMap)
    : base(@in)
{
    this.maxDoc = maxDoc;
    this.withFreqs = withFreqs;

    // Recycle buffers from a previous instance when one is offered; the
    // sorter itself is only reusable if it was sized for the same maxDoc.
    if (reuse == null)
    {
        docs = new int[64];
        sorter = new DocFreqSorter(maxDoc);
    }
    else
    {
        sorter = reuse.maxDoc == maxDoc ? reuse.sorter : new DocFreqSorter(maxDoc);
        docs = reuse.docs;
        freqs = reuse.freqs; // may be null when the reused enum had no freqs
    }
    docIt = -1;

    int count = 0;
    int docId;
    if (withFreqs)
    {
        // Keep freqs at least as large as docs so both grow in lockstep.
        if (freqs == null || freqs.Length < docs.Length)
        {
            freqs = new int[docs.Length];
        }
        while ((docId = @in.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
        {
            if (count >= docs.Length)
            {
                docs = ArrayUtil.Grow(docs, docs.Length + 1);
                freqs = ArrayUtil.Grow(freqs, freqs.Length + 1);
            }
            docs[count] = docMap.OldToNew(docId);
            freqs[count] = @in.Freq();
            ++count;
        }
    }
    else
    {
        freqs = null;
        while ((docId = @in.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
        {
            if (count >= docs.Length)
            {
                docs = ArrayUtil.Grow(docs, docs.Length + 1);
            }
            docs[count++] = docMap.OldToNew(docId);
        }
    }

    // TimSort can save much time compared to other sorts in case of
    // reverse sorting, or when sorting a concatenation of sorted readers.
    sorter.Reset(docs, freqs);
    sorter.Sort(0, count);
    upto = count;
}