public static void BeforeClassSortingAtomicReaderTest()
        {
            // Sort the index by id (as integer, in NUMERIC_DV_FIELD).
            Sort sort = new Sort(new SortField(NUMERIC_DV_FIELD, SortField.Type_e.INT));
            Sorter.DocMap docMap = new Sorter(sort).Sort(reader);

            // Sorter.compute also sorts the values: record, for every old doc,
            // its value at the position it will occupy in the sorted view.
            NumericDocValues docValues = reader.GetNumericDocValues(NUMERIC_DV_FIELD);
            int maxDoc = reader.MaxDoc;
            sortedValues = new int[maxDoc];
            for (int oldDoc = 0; oldDoc < maxDoc; oldDoc++)
            {
                sortedValues[docMap.OldToNew(oldDoc)] = (int)docValues.Get(oldDoc);
            }
            if (VERBOSE)
            {
                Console.WriteLine("docMap: " + docMap);
                Console.WriteLine("sortedValues: " + Arrays.ToString(sortedValues));
            }

            // Replace the reader with a view that exposes documents in sorted order.
            reader = SortingAtomicReader.Wrap(reader, sort);

            if (VERBOSE)
            {
                // Dump the doc IDs that are deleted in the sorted view.
                Console.WriteLine("mapped-deleted-docs: ");
                Bits mappedLiveDocs = reader.LiveDocs;
                int numBits = mappedLiveDocs.Length();
                for (int doc = 0; doc < numBits; doc++)
                {
                    if (!mappedLiveDocs.Get(doc))
                    {
                        Console.WriteLine(doc + " ");
                    }
                }
                Console.WriteLine();
            }

            TestUtil.CheckReader(reader);
        }
        /// <summary>
        /// Builds a block-join style index (each parent document preceded by its child
        /// documents), sorts it with a <c>BlockJoinComparatorSource</c>, and verifies
        /// that the resulting doc map keeps blocks intact: every child maps to the
        /// correct parent, children are ordered by "child_val", parents by "parent_val",
        /// and ties are broken by original doc ID (stable sort).
        /// </summary>
        public void Test()
        {
            RandomIndexWriter writer;
            DirectoryReader indexReader;
            int numParents = AtLeast(200);
            IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            cfg.SetMergePolicy(NewLogMergePolicy());
            using (writer = new RandomIndexWriter(Random(), NewDirectory(), cfg))
            {
                // The same Document/field instances are reused for every parent block;
                // only the field values change between AddDocuments calls.
                Document parentDoc = new Document();
                NumericDocValuesField parentVal = new NumericDocValuesField("parent_val", 0L);
                parentDoc.Add(parentVal);
                StringField parent = new StringField("parent", "true", Field.Store.YES);
                parentDoc.Add(parent);
                for (int i = 0; i < numParents; ++i)
                {
                    List<Document> documents = new List<Document>();
                    int numChildren = Random().nextInt(10);
                    for (int j = 0; j < numChildren; ++j)
                    {
                        Document childDoc = new Document();
                        childDoc.Add(new NumericDocValuesField("child_val", Random().nextInt(5)));
                        documents.Add(childDoc);
                    }
                    parentVal.LongValue = (Random().nextInt(50));
                    // Children first, parent last: the block-join document layout.
                    documents.Add(parentDoc);
                    writer.AddDocuments(documents);
                }
                // Force a single segment so GetOnlySegmentReader below succeeds.
                writer.ForceMerge(1);
                indexReader = writer.Reader;
            }

            AtomicReader reader = GetOnlySegmentReader(indexReader);
            Filter parentsFilter = new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("parent", "true"))));
            FixedBitSet parentBits = (FixedBitSet)parentsFilter.GetDocIdSet(reader.AtomicContext, null);
            NumericDocValues parentValues = reader.GetNumericDocValues("parent_val");

            NumericDocValues childValues = reader.GetNumericDocValues("child_val");

            Sort parentSort = new Sort(new SortField("parent_val", SortField.Type_e.LONG));
            Sort childSort = new Sort(new SortField("child_val", SortField.Type_e.LONG));

            Sort sort = new Sort(new SortField("custom", new BlockJoinComparatorSource(parentsFilter, parentSort, childSort)));
            Sorter sorter = new Sorter(sort);
            Sorter.DocMap docMap = sorter.Sort(reader);
            assertEquals(reader.MaxDoc, docMap.Count);

            // Walk the sorted order, collecting child doc IDs until a parent is
            // reached, then validate the completed block.
            int[] children = new int[1];
            int numChildren2 = 0;
            int previousParent = -1;
            for (int i = 0; i < docMap.Count; ++i)
            {
                int oldID = docMap.NewToOld(i);
                if (parentBits.Get(oldID))
                {
                    // check that we have the right children: each child's next set
                    // parent bit must be this parent
                    for (int j = 0; j < numChildren2; ++j)
                    {
                        assertEquals(oldID, parentBits.NextSetBit(children[j]));
                    }
                    // check that children are sorted
                    for (int j = 1; j < numChildren2; ++j)
                    {
                        int doc1 = children[j - 1];
                        int doc2 = children[j];
                        if (childValues.Get(doc1) == childValues.Get(doc2))
                        {
                            assertTrue(doc1 < doc2); // sort is stable
                        }
                        else
                        {
                            assertTrue(childValues.Get(doc1) < childValues.Get(doc2));
                        }
                    }
                    // check that parents are sorted
                    if (previousParent != -1)
                    {
                        if (parentValues.Get(previousParent) == parentValues.Get(oldID))
                        {
                            assertTrue(previousParent < oldID); // stable on equal values
                        }
                        else
                        {
                            assertTrue(parentValues.Get(previousParent) < parentValues.Get(oldID));
                        }
                    }
                    // reset for the next block
                    previousParent = oldID;
                    numChildren2 = 0;
                }
                else
                {
                    children = ArrayUtil.Grow(children, numChildren2 + 1);
                    children[numChildren2++] = oldID;
                }
            }
            indexReader.Dispose();
            // NOTE(review): writer was already disposed by the using block above;
            // reaching into writer.w here relies on Dispose leaving that field
            // accessible — confirm this is intentional.
            writer.w.Directory.Dispose();
        }
internal readonly Sorter.DocMap docMap; // pkg-protected to avoid synthetic accessor methods

// Private: instances are created through Wrap(), which validates the docMap
// (size check and consistency assert) before calling this constructor.
private SortingAtomicReader(AtomicReader @in, Sorter.DocMap docMap) : base(@in)
        {
            // Mapping between the wrapped reader's doc IDs and the sorted view's doc IDs.
            this.docMap = docMap;
        }
 /// <summary>
 /// Create a new <see cref="MergePolicy"/> that sorts documents with the given <paramref name="sort"/>.
 /// </summary>
 /// <param name="in">The wrapped merge policy to delegate to.</param>
 /// <param name="sort">The sort order applied to documents of merged segments.</param>
 public SortingMergePolicy(MergePolicy @in, Sort sort)
 {
     this.@in = @in;
     this.sorter = new Sorter(sort);
     this.sort = sort;
 }
 // LUCENENET TODO: Make the FieldInfo.IndexOptions non nullable
 /// <summary>
 /// Wraps a <see cref="Terms"/> instance, storing the old-to-new document
 /// mapping and the field's index options for later use.
 /// </summary>
 public SortingTerms(Terms @in, FieldInfo.IndexOptions? indexOptions, Sorter.DocMap docMap) : base(@in)
 {
     this.docMap = docMap;
     this.indexOptions = indexOptions;
 }
 /// <summary>
 /// Expert: same as <see cref="Wrap(AtomicReader, Sort)"/> but operates directly on a <see cref="Sorter.DocMap"/>.
 /// </summary>
 /// <param name="reader">The reader to wrap.</param>
 /// <param name="docMap">The old-to-new document mapping, or <c>null</c> when the reader is already sorted.</param>
 /// <returns>A sorting view over <paramref name="reader"/>, or <paramref name="reader"/> itself when <paramref name="docMap"/> is <c>null</c>.</returns>
 /// <exception cref="System.ArgumentException">If <c>docMap.Count</c> does not equal <c>reader.MaxDoc</c>.</exception>
 internal static AtomicReader Wrap(AtomicReader reader, Sorter.DocMap docMap)
 {
     if (docMap == null)
     {
         // the reader is already sorted
         return reader;
     }
     if (reader.MaxDoc != docMap.Count)
     {
         // Fixed: the original message was missing the space after "got",
         // producing e.g. "got42 != 43".
         throw new System.ArgumentException("reader.maxDoc() should be equal to docMap.size(), got " + reader.MaxDoc + " != " + docMap.Count);
     }
     Debug.Assert(Sorter.IsConsistent(docMap));
     return new SortingAtomicReader(reader, docMap);
 }
 /// <summary>
 /// Wraps a <see cref="Fields"/> instance, keeping the field infos and the
 /// old-to-new document mapping for the sorted view.
 /// </summary>
 public SortingFields(Fields @in, FieldInfos infos, Sorter.DocMap docMap)
     : base(@in)
 {
     this.docMap = docMap;
     this.infos = infos;
 }
 /// <summary>
 /// Eagerly drains <paramref name="in"/>: every document ID is remapped through
 /// <paramref name="docMap"/>, its positions are serialized into an in-memory
 /// RAM file via <c>AddPositions</c>, and the parallel (doc, file-offset) arrays
 /// are sorted so the enumeration can later be replayed in the new doc order.
 /// </summary>
 /// <param name="maxDoc">Document count; sizes the doc/offset sorter.</param>
 /// <param name="reuse">A previous instance whose buffers are recycled, or null.</param>
 /// <param name="in">The source enumerator; fully consumed by this constructor.</param>
 /// <param name="docMap">Old-to-new document ID mapping.</param>
 /// <param name="storeOffsets">Passed through to position serialization — presumably
 /// controls whether start/end offsets are written; confirm in AddPositions.</param>
 internal SortingDocsAndPositionsEnum(int maxDoc, SortingDocsAndPositionsEnum reuse, DocsAndPositionsEnum @in, Sorter.DocMap docMap, bool storeOffsets)
     : base(@in)
 {
     this.maxDoc = maxDoc;
     this.storeOffsets = storeOffsets;
     if (reuse != null)
     {
         // Recycle the previous instance's buffers to avoid re-allocation.
         docs = reuse.docs;
         offsets = reuse.offsets;
         payload = reuse.payload;
         file = reuse.file;
         if (reuse.maxDoc == maxDoc)
         {
             // The sorter is sized by maxDoc, so it is only reusable when maxDoc matches.
             sorter = reuse.sorter;
         }
         else
         {
             sorter = new DocOffsetSorter(maxDoc);
         }
     }
     else
     {
         docs = new int[32];
         offsets = new long[32];
         payload = new BytesRef(32);
         file = new RAMFile();
         sorter = new DocOffsetSorter(maxDoc);
     }
     using (IndexOutput @out = new RAMOutputStream(file))
     {
         int doc;
         int i = 0;
         while ((doc = @in.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
         {
             if (i == docs.Length)
             {
                 // Grow both parallel arrays together; the 4 is presumably
                 // bytes-per-int (RamUsageEstimator.NUM_BYTES_INT) — confirm.
                 int newLength = ArrayUtil.Oversize(i + 1, 4);
                 docs = Arrays.CopyOf(docs, newLength);
                 offsets = Arrays.CopyOf(offsets, newLength);
             }
             docs[i] = docMap.OldToNew(doc);
             // Remember where this document's positions begin in the RAM file.
             offsets[i] = @out.FilePointer;
             AddPositions(@in, @out);
             i++;
         }
         upto = i;
         // Sort the (doc, offset) pairs by the remapped doc IDs.
         sorter.Reset(docs, offsets);
         sorter.Sort(0, upto);
     }
     this.postingInput = new RAMInputStream("", file);
 }
 /// <summary>
 /// Wraps a <see cref="SortedSetDocValues"/> instance, keeping the delegate
 /// and the old-to-new document mapping.
 /// </summary>
 internal SortingSortedSetDocValues(SortedSetDocValues @in, Sorter.DocMap docMap)
 {
     this.@in = @in;
     this.docMap = docMap;
 }
 /// <summary>
 /// Eagerly drains <paramref name="in"/>, remapping each document ID through
 /// <paramref name="docMap"/> (and capturing term frequencies when
 /// <paramref name="withFreqs"/> is true), then sorts the buffered docs so they
 /// can be iterated in the new document order.
 /// </summary>
 /// <param name="maxDoc">Document count; sizes the doc/freq sorter.</param>
 /// <param name="reuse">A previous instance whose buffers are recycled, or null.</param>
 /// <param name="in">The source enumerator; fully consumed by this constructor.</param>
 /// <param name="withFreqs">Whether frequencies are read and kept alongside doc IDs.</param>
 /// <param name="docMap">Old-to-new document ID mapping.</param>
 internal SortingDocsEnum(int maxDoc, SortingDocsEnum reuse, DocsEnum @in, bool withFreqs, Sorter.DocMap docMap)
         : base(@in)
 {
     this.maxDoc = maxDoc;
     this.withFreqs = withFreqs;
     if (reuse != null)
     {
         // Recycle the previous instance's buffers where possible.
         if (reuse.maxDoc == maxDoc)
         {
             // The sorter is sized by maxDoc, so it is only reusable when maxDoc matches.
             sorter = reuse.sorter;
         }
         else
         {
             sorter = new DocFreqSorter(maxDoc);
         }
         docs = reuse.docs;
         freqs = reuse.freqs; // maybe null
     }
     else
     {
         docs = new int[64];
         sorter = new DocFreqSorter(maxDoc);
     }
     // -1 marks "iteration not started".
     docIt = -1;
     int i = 0;
     int doc;
     if (withFreqs)
     {
         // Keep freqs at least as large as docs so the parallel arrays stay in sync.
         if (freqs == null || freqs.Length < docs.Length)
         {
             freqs = new int[docs.Length];
         }
         while ((doc = @in.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
         {
             if (i >= docs.Length)
             {
                 // Grow both parallel arrays together.
                 docs = ArrayUtil.Grow(docs, docs.Length + 1);
                 freqs = ArrayUtil.Grow(freqs, freqs.Length + 1);
             }
             docs[i] = docMap.OldToNew(doc);
             freqs[i] = @in.Freq();
             ++i;
         }
     }
     else
     {
         freqs = null;
         while ((doc = @in.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
         {
             if (i >= docs.Length)
             {
                 docs = ArrayUtil.Grow(docs, docs.Length + 1);
             }
             docs[i++] = docMap.OldToNew(doc);
         }
     }
     // TimSort can save much time compared to other sorts in case of
     // reverse sorting, or when sorting a concatenation of sorted readers
     sorter.Reset(docs, freqs);
     sorter.Sort(0, i);
     upto = i;
 }
 /// <summary>
 /// Wraps a <see cref="Bits"/> instance, keeping the delegate and the
 /// old-to-new document mapping.
 /// </summary>
 public SortingBits(Bits @in, Sorter.DocMap docMap)
 {
     this.@in = @in;
     this.docMap = docMap;
 }
 /// <summary>
 /// Wraps a <see cref="NumericDocValues"/> instance, keeping the delegate and
 /// the old-to-new document mapping.
 /// </summary>
 public SortingNumericDocValues(NumericDocValues @in, Sorter.DocMap docMap)
 {
     this.@in = @in;
     this.docMap = docMap;
 }
 /// <summary>
 /// Wraps a <see cref="BinaryDocValues"/> instance, keeping the delegate and
 /// the old-to-new document mapping.
 /// </summary>
 internal SortingBinaryDocValues(BinaryDocValues @in, Sorter.DocMap docMap)
 {
     this.@in = @in;
     this.docMap = docMap;
 }
 /// <summary>
 /// Wraps a <see cref="TermsEnum"/> instance, storing the old-to-new document
 /// mapping and the field's index options.
 /// </summary>
 public SortingTermsEnum(TermsEnum @in, Sorter.DocMap docMap, FieldInfo.IndexOptions? indexOptions)
     : base(@in)
 {
     this.docMap = docMap;
     this.indexOptions = indexOptions;
 }
// Example #15
 /// <summary>
 /// Sorts the populated portion of the backing array in place using the
 /// supplied value-type sorter.
 /// </summary>
 /// <param name="sorter">The sorter that performs the ordering; passed by
 /// reference so any state it mutates is visible to the caller.</param>
 public void Sort<TSorter>(ref Sorter<T, TSorter> sorter) where TSorter : struct, IComparer<T>
 {
     // Only the first _size elements of _items hold live data.
     sorter.Sort(_items, 0, (int)_size);
 }