Example 1
0
 /// <summary>
 /// Builds a view over <paramref name="parent"/> covering the given reader
 /// slice: start is inclusive, end is exclusive (length = end - start).
 /// </summary>
 public BitsSlice(Bits parent, ReaderSlice slice)
 {
     this.Start = slice.Start;
     this.Length_Renamed = slice.Length;
     this.Parent = parent;
     // A negative length would mean the slice itself is malformed.
     Debug.Assert(this.Length_Renamed >= 0, "length=" + this.Length_Renamed);
 }
Example 2
0
 // Restricts parent to [slice.Start, slice.Start + slice.Length):
 // start is inclusive and end is exclusive, i.e. length = end - start.
 public BitsSlice(Bits parent, ReaderSlice slice)
 {
     this.Parent = parent;
     this.Length_Renamed = slice.Length;
     this.Start = slice.Start;
     Debug.Assert(this.Length_Renamed >= 0, "length=" + this.Length_Renamed);
 }
Example 3
0
 /// <summary>
 /// Creates a slice view over <paramref name="parent"/>; the range starts at
 /// <c>slice.Start</c> (inclusive) and ends exclusively, so length = end - start.
 /// </summary>
 public BitsSlice(IBits parent, ReaderSlice slice)
 {
     this.length = slice.Length;
     this.start = slice.Start;
     this.parent = parent;
     Debug.Assert(this.length >= 0, "length=" + this.length);
 }
Example 4
0
 // Holds a sub-enum index together with the reader slice it came from.
 public TermsEnumWithSlice(int index, ReaderSlice subSlice)
 {
     this.Index = index;
     this.SubSlice = subSlice;
     // A negative slice length would indicate a corrupt slice.
     if (Debugging.AssertsEnabled)
     {
         Debugging.Assert(subSlice.Length >= 0, "length={0}", subSlice.Length);
     }
 }
Example 5
0
 /// <summary>
 /// Wraps <paramref name="parent"/> restricted to <paramref name="slice"/>;
 /// start is inclusive and end is exclusive (length = end - start).
 /// </summary>
 public BitsSlice(IBits parent, ReaderSlice slice)
 {
     this.start = slice.Start;
     this.length = slice.Length;
     this.parent = parent;
     // Guard against slices constructed with a negative length.
     if (Debugging.AssertsEnabled)
     {
         Debugging.Assert(this.length >= 0, "length={0}", this.length);
     }
 }
Example 6
0
            /// <summary>
            /// Creates an ordinal map that allows mapping ords to/from a merged
            /// space from <c>subs</c>. </summary>
            /// <param name="owner"> a cache key </param>
            /// <param name="subs"> <see cref="TermsEnum"/>s that support <see cref="TermsEnum.Ord"/>. They need
            ///             not be dense (e.g. can be FilteredTermsEnums). </param>
            /// <exception cref="System.IO.IOException"> if an I/O error occurred. </exception>
            public OrdinalMap(object owner, TermsEnum[] subs)
            {
                // create the ordinal mappings by pulling a termsenum over each sub's
                // unique terms, and walking a multitermsenum over those
                this.owner      = owner;
                // NOTE(review): COMPACT presumably picks the smallest packed
                // representation — confirm against PackedInt32s documentation.
                globalOrdDeltas = new MonotonicAppendingInt64Buffer(PackedInt32s.COMPACT);
                firstSegments   = new AppendingPackedInt64Buffer(PackedInt32s.COMPACT);
                // One per-segment delta buffer per sub.
                ordDeltas       = new MonotonicAppendingInt64Buffer[subs.Length];
                for (int i = 0; i < ordDeltas.Length; i++)
                {
                    ordDeltas[i] = new MonotonicAppendingInt64Buffer();
                }
                // segmentOrds[i] tracks the next per-segment ordinal of sub i
                // that has not yet been mapped to a global ordinal.
                long[]           segmentOrds = new long[subs.Length];
                ReaderSlice[]    slices      = new ReaderSlice[subs.Length];
                TermsEnumIndex[] indexes     = new TermsEnumIndex[slices.Length];
                for (int i = 0; i < slices.Length; i++)
                {
                    // Slice bounds (0, 0) are placeholders; only the index i is
                    // used to identify each sub inside the MultiTermsEnum.
                    slices[i]  = new ReaderSlice(0, 0, i);
                    indexes[i] = new TermsEnumIndex(subs[i], i);
                }
                MultiTermsEnum mte = new MultiTermsEnum(slices);

                mte.Reset(indexes);
                long globalOrd = 0;

                // Walk the merged term stream; each successful Next() advances
                // the merged enum by one term, numbered by globalOrd.
                while (mte.Next() != null)
                {
                    TermsEnumWithSlice[] matches = mte.MatchArray;
                    for (int i = 0; i < mte.MatchCount; i++)
                    {
                        int  segmentIndex = matches[i].Index;
                        long segmentOrd   = matches[i].Terms.Ord;
                        // delta converts this sub's segment ordinal to the global one.
                        long delta        = globalOrd - segmentOrd;
                        // for each unique term, just mark the first segment index/delta where it occurs
                        if (i == 0)
                        {
                            firstSegments.Add(segmentIndex);
                            globalOrdDeltas.Add(delta);
                        }
                        // for each per-segment ord, map it back to the global term.
                        while (segmentOrds[segmentIndex] <= segmentOrd)
                        {
                            ordDeltas[segmentIndex].Add(delta);
                            segmentOrds[segmentIndex]++;
                        }
                    }
                    globalOrd++;
                }
                // Freeze the buffers — no further appends happen after construction.
                firstSegments.Freeze();
                globalOrdDeltas.Freeze();
                for (int i = 0; i < ordDeltas.Length; ++i)
                {
                    ordDeltas[i].Freeze();
                }
            }
Example 7
0
        /// <summary>
        /// Returns a sub-Bits matching the provided <paramref name="slice"/>
        /// <para/>
        /// Because <c>null</c> usually has a special meaning for
        /// <see cref="IBits"/> (e.g. no deleted documents), callers must check
        /// <see cref="SubResult.Matches"/> instead to ensure the sub was
        /// actually found.
        /// </summary>
        public SubResult GetMatchingSub(ReaderSlice slice)
        {
            int reader = ReaderUtil.SubIndex(slice.Start, starts);
            Debug.Assert(reader != -1);
            Debug.Assert(reader < subs.Length, "slice=" + slice + " starts[-1]=" + starts[starts.Length - 1]);

            // The sub is only reported when its boundaries coincide exactly
            // with the requested slice.
            bool exact = starts[reader] == slice.Start
                && starts[reader + 1] == slice.Start + slice.Length;

            SubResult subResult = new SubResult();
            subResult.Matches = exact;
            if (exact)
            {
                subResult.Result = subs[reader];
            }
            return subResult;
        }
Example 8
0
        /// <summary>
        /// Sole constructor.
        /// </summary>
        /// <param name="subs"> The <seealso cref="Terms"/> instances of all sub-readers. </param>
        /// <param name="subSlices"> A parallel array (matching <c>subs</c>)
        ///        describing the sub-reader slices. </param>
        public MultiTerms(Terms[] subs, ReaderSlice[] subSlices)
        {
            this.Subs = subs;
            this.SubSlices = subSlices;
            Debug.Assert(subs.Length > 0, "inefficient: don't use MultiTerms over one sub");

            IComparer<BytesRef> termComp = null;
            bool hasFreqs = true;
            bool hasOffsets = true;
            bool hasPositions = true;
            bool hasPayloads = false;
            for (int i = 0; i < subs.Length; i++)
            {
                IComparer<BytesRef> subTermComp = subs[i].Comparator;
                if (termComp == null)
                {
                    // Adopt the first comparator seen as the reference.
                    termComp = subTermComp;
                }
                else if (subTermComp != null && !subTermComp.Equals(termComp))
                {
                    // We cannot merge sub-readers that have different TermComps.
                    throw new InvalidOperationException("sub-readers have different BytesRef.Comparators; cannot merge");
                }
                hasFreqs &= subs[i].HasFreqs();
                hasOffsets &= subs[i].HasOffsets();
                hasPositions &= subs[i].HasPositions();
                hasPayloads |= subs[i].HasPayloads();
            }

            TermComp = termComp;
            HasFreqs_Renamed = hasFreqs;
            HasOffsets_Renamed = hasOffsets;
            HasPositions_Renamed = hasPositions;
            // if all subs have pos, and at least one has payloads.
            HasPayloads_Renamed = HasPositions_Renamed && hasPayloads;
        }
Example 9
0
 // Associates a sub-enum index with the reader slice it belongs to.
 public TermsEnumWithSlice(int index, ReaderSlice subSlice)
 {
     this.Index = index;
     this.SubSlice = subSlice;
     // Slices must never carry a negative length.
     Debug.Assert(subSlice.Length >= 0, "length=" + subSlice.Length);
 }
Example 10
0
 /// <summary>
 /// Expert: construct a new MultiFields instance directly from the given
 /// sub-readers' fields and their parallel slice array.
 /// @lucene.internal
 /// </summary>
 // TODO: why is this public?
 public MultiFields(Fields[] subs, ReaderSlice[] subSlices)
 {
     this.SubSlices = subSlices;
     this.Subs = subs;
 }
Example 11
0
 /// <summary>
 /// Returns a sub-Bits matching the provided <code>slice</code>
 /// <p>
 /// Because <code>null</code> usually has a special meaning for
 /// Bits (e.g. no deleted documents), callers must check
 /// <seealso cref="SubResult#matches"/> instead to ensure the sub was
 /// actually found.
 /// </summary>
 public SubResult GetMatchingSub(ReaderSlice slice)
 {
     int reader = ReaderUtil.SubIndex(slice.Start, Starts);
     Debug.Assert(reader != -1);
     Debug.Assert(reader < Subs.Length, "slice=" + slice + " starts[-1]=" + Starts[Starts.Length - 1]);

     // Only report a match when the sub's boundaries line up exactly
     // with the requested slice.
     bool covered = Starts[reader] == slice.Start
         && Starts[reader + 1] == slice.Start + slice.Length;

     SubResult subResult = new SubResult();
     subResult.Matches = covered;
     if (covered)
     {
         subResult.Result = Subs[reader];
     }
     return subResult;
 }
Example 12
0
 /// <summary>
 /// Creates an ordinal map that allows mapping ords to/from a merged
 /// space from <code>subs</code>. </summary>
 /// <param name="owner"> a cache key </param>
 /// <param name="subs"> TermsEnums that support <seealso cref="TermsEnum#ord()"/>. They need
 ///             not be dense (e.g. can be FilteredTermsEnums). </param>
 /// <exception cref="IOException"> if an I/O error occurred. </exception>
 public OrdinalMap(object owner, TermsEnum[] subs)
 {
     // create the ordinal mappings by pulling a termsenum over each sub's
     // unique terms, and walking a multitermsenum over those
     this.Owner = owner;
     // NOTE(review): COMPACT presumably selects the smallest packed
     // representation — confirm against PackedInts documentation.
     GlobalOrdDeltas = new MonotonicAppendingLongBuffer(PackedInts.COMPACT);
     FirstSegments = new AppendingPackedLongBuffer(PackedInts.COMPACT);
     // One per-segment delta buffer per sub.
     OrdDeltas = new MonotonicAppendingLongBuffer[subs.Length];
     for (int i = 0; i < OrdDeltas.Length; i++)
     {
         OrdDeltas[i] = new MonotonicAppendingLongBuffer();
     }
     // segmentOrds[i] tracks the next per-segment ordinal of sub i that has
     // not yet been mapped to a global ordinal.
     long[] segmentOrds = new long[subs.Length];
     ReaderSlice[] slices = new ReaderSlice[subs.Length];
     TermsEnumIndex[] indexes = new TermsEnumIndex[slices.Length];
     for (int i = 0; i < slices.Length; i++)
     {
         // Slice bounds (0, 0) are placeholders; only the index i is used to
         // identify each sub inside the MultiTermsEnum.
         slices[i] = new ReaderSlice(0, 0, i);
         indexes[i] = new TermsEnumIndex(subs[i], i);
     }
     MultiTermsEnum mte = new MultiTermsEnum(slices);
     mte.Reset(indexes);
     long globalOrd = 0;
     // Walk the merged term stream; each successful Next() advances the
     // merged enum by one term, numbered by globalOrd.
     while (mte.Next() != null)
     {
         TermsEnumWithSlice[] matches = mte.MatchArray;
         for (int i = 0; i < mte.MatchCount; i++)
         {
             int segmentIndex = matches[i].Index;
             long segmentOrd = matches[i].Terms.Ord();
             // delta converts this sub's segment ordinal to the global one.
             long delta = globalOrd - segmentOrd;
             // for each unique term, just mark the first segment index/delta where it occurs
             if (i == 0)
             {
                 FirstSegments.Add(segmentIndex);
                 GlobalOrdDeltas.Add(delta);
             }
             // for each per-segment ord, map it back to the global term.
             while (segmentOrds[segmentIndex] <= segmentOrd)
             {
                 OrdDeltas[segmentIndex].Add(delta);
                 segmentOrds[segmentIndex]++;
             }
         }
         globalOrd++;
     }
     // Freeze the buffers — no further appends happen after construction.
     FirstSegments.Freeze();
     GlobalOrdDeltas.Freeze();
     for (int i = 0; i < OrdDeltas.Length; ++i)
     {
         OrdDeltas[i].Freeze();
     }
 }