Example #1
        /// <summary>
        /// The terms array must contain newly created <see cref="TermsEnum"/>s, i.e.
        /// <see cref="TermsEnum.MoveNext()"/> has not yet been called.
        /// </summary>
        public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex)
        {
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(termsEnumsIndex.Length <= top.Length);
            }
            numSubs  = 0;
            numTop   = 0;
            termComp = null;
            queue.Clear();
            for (int i = 0; i < termsEnumsIndex.Length; i++)
            {
                TermsEnumIndex termsEnumIndex = termsEnumsIndex[i];
                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(termsEnumIndex != null);
                }

                // init our term comp
                if (termComp == null)
                {
                    queue.termComp = termComp = termsEnumIndex.TermsEnum.Comparer;
                }
                else
                {
                    // We cannot merge sub-readers that have
                    // different TermComps
                    IComparer<BytesRef> subTermComp = termsEnumIndex.TermsEnum.Comparer;
                    if (subTermComp != null && !subTermComp.Equals(termComp))
                    {
                        throw new InvalidOperationException("sub-readers have different BytesRef.Comparers: " + subTermComp + " vs " + termComp + "; cannot merge");
                    }
                }

                BytesRef term;
                if (termsEnumIndex.TermsEnum.MoveNext())
                {
                    term = termsEnumIndex.TermsEnum.Term;
                    TermsEnumWithSlice entry = subs[termsEnumIndex.SubIndex];
                    entry.Reset(termsEnumIndex.TermsEnum, term);
                    queue.Add(entry);
                    currentSubs[numSubs++] = entry;
                }
                else
                {
                    // field has no terms
                }
            }

            if (queue.Count == 0)
            {
                return TermsEnum.EMPTY;
            }
            else
            {
                return this;
            }
        }
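Example #2 below shows how this Reset overload is typically driven from inside the library. The sketch that follows mirrors that setup and then consumes the merged enumerator with the newer MoveNext()/Term surface used above. It is only a sketch: the DumpMergedTerms helper is made up for illustration, and TermsEnumIndex may be a nested or internal type in your release, so calling code outside the library may not be able to construct it directly.

using System;
using Lucene.Net.Index;
using Lucene.Net.Util;

internal static class MergedTermsSketch
{
    // Hypothetical helper: merge the terms of several newly created sub-enumerators
    // and print each unique term once, in comparer order.
    public static void DumpMergedTerms(TermsEnum[] subs)
    {
        var slices = new ReaderSlice[subs.Length];
        var indexes = new TermsEnumIndex[subs.Length]; // may be internal to the library
        for (int i = 0; i < subs.Length; i++)
        {
            slices[i] = new ReaderSlice(0, 0, i);
            indexes[i] = new TermsEnumIndex(subs[i], i); // subs[i] must not have been advanced yet
        }

        var mte = new MultiTermsEnum(slices);
        TermsEnum merged = mte.Reset(indexes); // returns TermsEnum.EMPTY if no sub has terms

        while (merged.MoveNext())
        {
            BytesRef term = merged.Term;
            Console.WriteLine(term.Utf8ToString());
        }
    }
}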
Example #2
            /// <summary>
            /// Creates an ordinal map that allows mapping ords to/from a merged
            /// space from <c>subs</c>. </summary>
            /// <param name="owner"> a cache key </param>
            /// <param name="subs"> <see cref="TermsEnum"/>s that support <see cref="TermsEnum.Ord"/>. They need
            ///             not be dense (e.g. can be FilteredTermsEnums). </param>
            /// <exception cref="System.IO.IOException"> if an I/O error occurred. </exception>
            public OrdinalMap(object owner, TermsEnum[] subs)
            {
                // create the ordinal mappings by pulling a termsenum over each sub's
                // unique terms, and walking a multitermsenum over those
                this.owner      = owner;
                globalOrdDeltas = new MonotonicAppendingInt64Buffer(PackedInt32s.COMPACT);
                firstSegments   = new AppendingPackedInt64Buffer(PackedInt32s.COMPACT);
                ordDeltas       = new MonotonicAppendingInt64Buffer[subs.Length];
                for (int i = 0; i < ordDeltas.Length; i++)
                {
                    ordDeltas[i] = new MonotonicAppendingInt64Buffer();
                }
                long[]           segmentOrds = new long[subs.Length];
                ReaderSlice[]    slices      = new ReaderSlice[subs.Length];
                TermsEnumIndex[] indexes     = new TermsEnumIndex[slices.Length];
                for (int i = 0; i < slices.Length; i++)
                {
                    slices[i]  = new ReaderSlice(0, 0, i);
                    indexes[i] = new TermsEnumIndex(subs[i], i);
                }
                MultiTermsEnum mte = new MultiTermsEnum(slices);

                mte.Reset(indexes);
                long globalOrd = 0;

                while (mte.Next() != null)
                {
                    TermsEnumWithSlice[] matches = mte.MatchArray;
                    for (int i = 0; i < mte.MatchCount; i++)
                    {
                        int  segmentIndex = matches[i].Index;
                        long segmentOrd   = matches[i].Terms.Ord;
                        long delta        = globalOrd - segmentOrd;
                        // for each unique term, just mark the first segment index/delta where it occurs
                        if (i == 0)
                        {
                            firstSegments.Add(segmentIndex);
                            globalOrdDeltas.Add(delta);
                        }
                        // for each per-segment ord, map it back to the global term.
                        while (segmentOrds[segmentIndex] <= segmentOrd)
                        {
                            ordDeltas[segmentIndex].Add(delta);
                            segmentOrds[segmentIndex]++;
                        }
                    }
                    globalOrd++;
                }
                firstSegments.Freeze();
                globalOrdDeltas.Freeze();
                for (int i = 0; i < ordDeltas.Length; ++i)
                {
                    ordDeltas[i].Freeze();
                }
            }
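The constructor above records, for every per-segment ord, the delta to its global ord, and for every global ord, the first segment where the term occurs plus the delta back to that segment's ord; mapping later is then a single addition. The following standalone sketch reproduces that arithmetic with plain arrays instead of the packed buffers; the term lists and class name are invented for illustration.

using System;
using System.Collections.Generic;
using System.Linq;

internal static class OrdinalMapArithmeticSketch
{
    public static void Main()
    {
        // Two already-sorted per-segment term dictionaries (hypothetical data).
        string[][] segments =
        {
            new[] { "apple", "cherry" },          // segment 0: ords 0..1
            new[] { "banana", "cherry", "date" }, // segment 1: ords 0..2
        };

        // Merged, deduplicated global term space: apple, banana, cherry, date.
        string[] global = segments.SelectMany(s => s)
                                  .Distinct()
                                  .OrderBy(t => t, StringComparer.Ordinal)
                                  .ToArray();

        // ordDeltas[seg][segmentOrd] = globalOrd - segmentOrd: the same quantity the
        // constructor appends to one MonotonicAppendingInt64Buffer per segment.
        var ordDeltas = new List<long>[segments.Length];
        for (int seg = 0; seg < segments.Length; seg++)
        {
            ordDeltas[seg] = new List<long>();
            for (int segmentOrd = 0; segmentOrd < segments[seg].Length; segmentOrd++)
            {
                long globalOrd = Array.IndexOf(global, segments[seg][segmentOrd]);
                ordDeltas[seg].Add(globalOrd - segmentOrd);
            }
        }

        // Mapping a per-segment ord back to the global space is then just an addition:
        // segment 1, ord 1 ("cherry") -> global ord 1 + ordDeltas[1][1] = 2.
        Console.WriteLine(1 + ordDeltas[1][1]); // prints 2
    }
}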
Example #3
        /// <summary>
        /// The terms array must contain newly created TermsEnums, i.e.
        /// <see cref="TermsEnum.Next()"/> has not yet been called.
        /// </summary>
        public TermsEnum Reset(TermsEnumIndex[] termsEnumsIndex)
        {
            Debug.Assert(termsEnumsIndex.Length <= Top.Length);
            NumSubs  = 0;
            NumTop   = 0;
            TermComp = null;
            Queue.Clear();
            for (int i = 0; i < termsEnumsIndex.Length; i++)
            {
                TermsEnumIndex termsEnumIndex = termsEnumsIndex[i];
                Debug.Assert(termsEnumIndex != null);

                // init our term comp
                if (TermComp == null)
                {
                    Queue.TermComp = TermComp = termsEnumIndex.TermsEnum.Comparator;
                }
                else
                {
                    // We cannot merge sub-readers that have
                    // different TermComps
                    IComparer<BytesRef> subTermComp = termsEnumIndex.TermsEnum.Comparator;
                    if (subTermComp != null && !subTermComp.Equals(TermComp))
                    {
                        throw new InvalidOperationException("sub-readers have different BytesRef.Comparators: " + subTermComp + " vs " + TermComp + "; cannot merge");
                    }
                }

                BytesRef term = termsEnumIndex.TermsEnum.Next();
                if (term != null)
                {
                    TermsEnumWithSlice entry = Subs[termsEnumIndex.SubIndex];
                    entry.Reset(termsEnumIndex.TermsEnum, term);
                    Queue.Add(entry);
                    CurrentSubs[NumSubs++] = entry;
                }
                else
                {
                    // field has no terms
                }
            }

            if (Queue.Size() == 0)
            {
                return TermsEnum.EMPTY;
            }
            else
            {
                return this;
            }
        }
Example #4
 /// <summary>
 /// Creates an ordinal map that allows mapping ords to/from a merged
 /// space from <code>subs</code>. </summary>
 /// <param name="owner"> a cache key </param>
 /// <param name="subs"> TermsEnums that support <see cref="TermsEnum.Ord()"/>. They need
 ///             not be dense (e.g. can be FilteredTermsEnums). </param>
 /// <exception cref="IOException"> if an I/O error occurred. </exception>
 public OrdinalMap(object owner, TermsEnum[] subs)
 {
     // create the ordinal mappings by pulling a termsenum over each sub's
     // unique terms, and walking a multitermsenum over those
     this.Owner = owner;
     GlobalOrdDeltas = new MonotonicAppendingLongBuffer(PackedInts.COMPACT);
     FirstSegments = new AppendingPackedLongBuffer(PackedInts.COMPACT);
     OrdDeltas = new MonotonicAppendingLongBuffer[subs.Length];
     for (int i = 0; i < OrdDeltas.Length; i++)
     {
         OrdDeltas[i] = new MonotonicAppendingLongBuffer();
     }
     long[] segmentOrds = new long[subs.Length];
     ReaderSlice[] slices = new ReaderSlice[subs.Length];
     TermsEnumIndex[] indexes = new TermsEnumIndex[slices.Length];
     for (int i = 0; i < slices.Length; i++)
     {
         slices[i] = new ReaderSlice(0, 0, i);
         indexes[i] = new TermsEnumIndex(subs[i], i);
     }
     MultiTermsEnum mte = new MultiTermsEnum(slices);
     mte.Reset(indexes);
     long globalOrd = 0;
     while (mte.Next() != null)
     {
         TermsEnumWithSlice[] matches = mte.MatchArray;
         for (int i = 0; i < mte.MatchCount; i++)
         {
             int segmentIndex = matches[i].Index;
             long segmentOrd = matches[i].Terms.Ord();
             long delta = globalOrd - segmentOrd;
             // for each unique term, just mark the first segment index/delta where it occurs
             if (i == 0)
             {
                 FirstSegments.Add(segmentIndex);
                 GlobalOrdDeltas.Add(delta);
             }
             // for each per-segment ord, map it back to the global term.
             while (segmentOrds[segmentIndex] <= segmentOrd)
             {
                 OrdDeltas[segmentIndex].Add(delta);
                 segmentOrds[segmentIndex]++;
             }
         }
         globalOrd++;
     }
     FirstSegments.Freeze();
     GlobalOrdDeltas.Freeze();
     for (int i = 0; i < OrdDeltas.Length; ++i)
     {
         OrdDeltas[i].Freeze();
     }
 }
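Once constructed, an OrdinalMap is normally consulted to translate a per-segment ord into the merged space and to find where a global term first occurs. The sketch below is a hedged usage example, not library code: it assumes the map exposes GetGlobalOrd, GetFirstSegmentNumber and GetFirstSegmentOrd, mirroring the Java getGlobalOrd/getFirstSegmentNumber/getFirstSegmentOrd accessors; exact member names and return types vary between Lucene.NET releases, and the Demo method itself is hypothetical.

using System;
using Lucene.Net.Index;

internal static class OrdinalMapUsageSketch
{
    // Hypothetical: given per-segment TermsEnums that support Ord, build the map
    // and translate one per-segment ord into the merged (global) ordinal space.
    public static void Demo(TermsEnum[] subs, int segmentIndex, long segmentOrd)
    {
        // The owner argument is a cache key; callers in the library typically pass
        // the reader's core cache key. A fresh object works for a one-off map.
        var map = new MultiDocValues.OrdinalMap(new object(), subs);

        long globalOrd = map.GetGlobalOrd(segmentIndex, segmentOrd);   // assumed accessor
        var firstSegment = map.GetFirstSegmentNumber(globalOrd);       // assumed accessor
        long firstSegmentOrd = map.GetFirstSegmentOrd(globalOrd);      // assumed accessor

        Console.WriteLine($"segment {segmentIndex} ord {segmentOrd} -> global ord {globalOrd}");
        Console.WriteLine($"first occurs in segment {firstSegment} at ord {firstSegmentOrd}");
    }
}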