protected override MonotonicBlockPackedReader GetAddressInstance(IndexInput data, FieldInfo field,
     BinaryEntry bytes)
 {
     data.Seek(bytes.AddressesOffset);
     return new MonotonicBlockPackedReader((IndexInput)data.Clone(), bytes.PackedIntsVersion, bytes.BlockSize, bytes.Count,
         true);
 }
 protected override MonotonicBlockPackedReader GetOrdIndexInstance(IndexInput data, FieldInfo field,
     NumericEntry entry)
 {
     data.Seek(entry.Offset);
     return new MonotonicBlockPackedReader((IndexInput)data.Clone(), entry.PackedIntsVersion, entry.BlockSize, entry.Count,
         true);
 }
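A pattern worth making explicit in the two methods above: the shared data input is positioned once with Seek, and the MonotonicBlockPackedReader then receives its own clone, so later reads by other consumers of data can never disturb it. Below is a minimal sketch of that contract, using a hypothetical SliceStream stand-in rather than Lucene.NET's IndexInput:

    using System;

    // Hypothetical stand-in for IndexInput: clones share the backing bytes
    // but carry an independent position, which is exactly the property the
    // methods above rely on.
    public class SliceStream
    {
        private readonly byte[] data;
        public long Position { get; private set; }

        public SliceStream(byte[] data) { this.data = data; }

        public void Seek(long pos) { Position = pos; }
        public byte ReadByte() { return data[Position++]; }

        // same bytes, private file pointer
        public SliceStream Clone() { return new SliceStream(data) { Position = Position }; }
    }

    public static class SliceStreamDemo
    {
        public static void Main()
        {
            var data = new SliceStream(new byte[] { 10, 20, 30, 40 });
            data.Seek(2);                         // position the shared stream once
            var reader = data.Clone();            // hand the sub-reader a private clone
            Console.WriteLine(reader.ReadByte()); // 30
            Console.WriteLine(data.Position);     // still 2
        }
    }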
Example #3
 public SegmentTermDocs(IndexInput freqStream, TermInfosReader tis, FieldInfos fieldInfos)
 {
     this.FreqStream = (IndexInput)freqStream.Clone();
     this.Tis = tis;
     this.FieldInfos = fieldInfos;
     SkipInterval = tis.SkipInterval;
     MaxSkipLevels = tis.MaxSkipLevels;
 }
Example #4
        public override object Clone()
        {
            IndexInput cloneTvx = null;
            IndexInput cloneTvd = null;
            IndexInput cloneTvf = null;

            // These are null when a TermVectorsReader was created
            // on a segment that did not have term vectors saved
            if (tvx != null && tvd != null && tvf != null)
            {
                cloneTvx = (IndexInput)tvx.Clone();
                cloneTvd = (IndexInput)tvd.Clone();
                cloneTvf = (IndexInput)tvf.Clone();
            }

            return new Lucene3xTermVectorsReader(fieldInfos, cloneTvx, cloneTvd, cloneTvf, size, numTotalDocs, docStoreOffset, format);
        }
Example #5
        public override object Clone()
        {
            IndexInput cloneTvx = null;
            IndexInput cloneTvd = null;
            IndexInput cloneTvf = null;

            // These are null when a TermVectorsReader was created
            // on a segment that did not have term vectors saved
            if (Tvx != null && Tvd != null && Tvf != null)
            {
                cloneTvx = (IndexInput)Tvx.Clone();
                cloneTvd = (IndexInput)Tvd.Clone();
                cloneTvf = (IndexInput)Tvf.Clone();
            }

            return new Lucene40TermVectorsReader(FieldInfos, cloneTvx, cloneTvd, cloneTvf, Size_Renamed, NumTotalDocs);
        }
Example #6
            public override object Clone()
            {
                FaultyIndexInput i = new FaultyIndexInput((IndexInput)@delegate.Clone());

                // seek the clone to our current position
                try
                {
                    i.Seek(GetFilePointer());
                }
                catch (IOException e)
                {
                    // rethrow with the original failure attached instead of discarding it
                    throw new Exception("could not seek the clone to the current position", e);
                }
                return i;
            }
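The notable detail here is the explicit Seek after cloning: rather than trusting the delegate's clone to come back at the right offset, the wrapper pins the new clone to its own GetFilePointer(). A hedged sketch of the same defensive move over a plain stream (PositionedReader is illustrative, not a Lucene.NET type):

    using System;
    using System.IO;

    // Illustrative wrapper whose Clone() re-seeks the copy to the wrapper's
    // own position instead of relying on the inner stream's clone semantics.
    public class PositionedReader
    {
        private readonly MemoryStream inner;

        public PositionedReader(MemoryStream inner) { this.inner = inner; }

        public long GetFilePointer() { return inner.Position; }
        public void Seek(long pos) { inner.Position = pos; }

        public PositionedReader Clone()
        {
            // MemoryStream has no Clone(); re-open over a copy of the bytes.
            var copy = new PositionedReader(new MemoryStream(inner.ToArray()));
            copy.Seek(GetFilePointer()); // pin the clone to our current position
            return copy;
        }
    }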
Example #7
        public virtual void TestClonedStreamsClosing()
        {
            SetUp_2();
            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);

            // basic clone
            IndexInput expected = Dir.OpenInput("f11", NewIOContext(Random()));

            // this test only works for FSIndexInput
            Assert.IsTrue(TestHelper.IsSimpleFSIndexInput(expected));
            Assert.IsTrue(TestHelper.IsSimpleFSIndexInputOpen(expected));

            IndexInput one = cr.OpenInput("f11", NewIOContext(Random()));

            IndexInput two = (IndexInput)one.Clone();

            AssertSameStreams("basic clone one", expected, one);
            expected.Seek(0);
            AssertSameStreams("basic clone two", expected, two);

            // Now close the first stream
            one.Dispose();

            // The following should really fail, since we shouldn't be able to
            // access a file once close has been called on it (regardless of
            // buffering and/or clone magic)
            expected.Seek(0);
            two.Seek(0);
            AssertSameStreams("basic clone two/2", expected, two);

            // Now close the compound reader
            cr.Dispose();

            // The following may also fail since the compound stream is closed
            expected.Seek(0);
            two.Seek(0);
            //assertSameStreams("basic clone two/3", expected, two);

            // Now close the second clone
            two.Dispose();
            expected.Seek(0);
            two.Seek(0);
            //assertSameStreams("basic clone two/4", expected, two);

            expected.Dispose();
        }
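What this test pins down is the ownership rule behind all of these clones: they share one underlying file handle, so disposing the original (or the enclosing CompoundFileDirectory) can invalidate reads through a clone, yet each clone must still be disposed to release its own state. A toy model of that rule, with hypothetical types:

    using System;

    // Hypothetical model: all clones share one handle; closing the handle
    // breaks every clone, but each clone still owns clone-local state.
    public class SharedHandle
    {
        public bool IsOpen { get; private set; } = true;
        public void Close() { IsOpen = false; }
    }

    public class ClonedInput : IDisposable
    {
        private readonly SharedHandle handle;
        public ClonedInput(SharedHandle handle) { this.handle = handle; }

        public ClonedInput Clone() { return new ClonedInput(handle); }

        public byte ReadByte()
        {
            if (!handle.IsOpen)
            {
                throw new ObjectDisposedException("underlying file already closed");
            }
            return 0; // stand-in for a real read
        }

        public void Dispose() { /* release only clone-local buffers */ }
    }

    public static class CloneLifecycleDemo
    {
        public static void Main()
        {
            var handle = new SharedHandle();
            var one = new ClonedInput(handle);
            var two = one.Clone();

            handle.Close();  // models the original being disposed and closing the file
            try { two.ReadByte(); }
            catch (ObjectDisposedException) { Console.WriteLine("clone is dead too"); }
            two.Dispose();   // still required, but only frees clone-local state
        }
    }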
Example #8
        public override NumericDocValues GetNumeric(FieldInfo fieldInfo)
        {
            var field = fields[fieldInfo.Name];

            if (Debugging.AssertsEnabled)
            {
                // SegmentCoreReaders already verifies this field is valid:
                Debugging.Assert(field != null, "field={0} fields={1}", fieldInfo.Name, fields);
            }

            var @in     = (IndexInput)data.Clone();
            var scratch = new BytesRef();

            return new NumericDocValuesAnonymousClass(this, field, @in, scratch);
        }
Example #9
        public override NumericDocValues GetNumeric(FieldInfo fieldInfo)
        {
            var field = fields[fieldInfo.Name];

            // SegmentCoreReaders already verifies this field is valid:
            Debug.Assert(field != null, "field=" + fieldInfo.Name + " fields=" + fields);

            var @in     = (IndexInput)data.Clone();
            var scratch = new BytesRef();

            return new NumericDocValuesAnonymousInnerClassHelper(this, field, @in, scratch);
        }
Example #10
        public SimpleTextFieldsReader(SegmentReadState state)
        {
            this.maxDoc = state.SegmentInfo.DocCount;
            fieldInfos  = state.FieldInfos;
            input       = state.Directory.OpenInput(SimpleTextPostingsFormat.GetPostingsFileName(state.SegmentInfo.Name, state.SegmentSuffix), state.Context);
            bool success = false;

            try
            {
                fields  = ReadFields((IndexInput)input.Clone());
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.DisposeWhileHandlingException(this);
                }
            }
        }
Example #11
        public System.Object Clone()
        {
            SegmentTermEnum clone = null;

            try
            {
                clone = (SegmentTermEnum)base.MemberwiseClone();
            }
            catch (System.Exception)
            {
            }

            clone.input    = (IndexInput)input.Clone();
            clone.termInfo = new TermInfo(termInfo);

            clone.termBuffer = (TermBuffer)termBuffer.Clone();
            clone.prevBuffer = (TermBuffer)prevBuffer.Clone();
            clone.scanBuffer = new TermBuffer();

            return clone;
        }
Example #12
        public object Clone()
        {
            SegmentTermEnum clone = null;

            try
            {
                clone = (SegmentTermEnum)base.MemberwiseClone();
            }
            catch (InvalidOperationException)
            {
            }

            clone.Input            = (IndexInput)Input.Clone();
            clone.TermInfo_Renamed = new TermInfo(TermInfo_Renamed);

            clone.TermBuffer = (TermBuffer)TermBuffer.Clone();
            clone.PrevBuffer = (TermBuffer)PrevBuffer.Clone();
            clone.ScanBuffer = new TermBuffer();

            return clone;
        }
Example #13
        public object Clone()
        {
            SegmentTermEnum clone = null;

            try
            {
                clone = (SegmentTermEnum)base.MemberwiseClone();
            }
            catch (InvalidOperationException)
            {
            }

            clone.input    = (IndexInput)input.Clone();
            clone.termInfo = new TermInfo(termInfo);

            clone.termBuffer = (TermBuffer)termBuffer.Clone();
            clone.prevBuffer = (TermBuffer)prevBuffer.Clone();
            clone.scanBuffer = new TermBuffer();

            return clone;
        }
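Examples #11 through #13 above are three ports of the same two-step recipe: MemberwiseClone gives a cheap shallow copy, then every mutable member (the IndexInput, the term buffers) is re-cloned so the copies stop sharing state. Reduced to its skeleton, without Lucene.NET's types:

    using System;
    using System.Collections.Generic;

    public class Cursor
    {
        // value state: the shallow clone already copies this correctly
        public int Position;

        // reference state: must be replaced after the shallow clone
        private List<int> buffer = new List<int>();

        public Cursor Clone()
        {
            // step 1: shallow copy of all fields
            var clone = (Cursor)MemberwiseClone();
            // step 2: give the clone its own copy of every mutable member
            clone.buffer = new List<int>(buffer);
            return clone;
        }
    }

The three variants differ only in how they silence the compiler about the impossible clone failure; the cloning recipe itself is identical.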
Example #14
        /// <summary>
        /// Optimized implementation. </summary>
        public virtual bool SkipTo(int target)
        {
            // don't skip if the target is close (within skipInterval docs away)
            if ((target - skipInterval) >= doc && m_df >= skipInterval) // optimized case
            {
                if (skipListReader == null)
                {
                    skipListReader = new Lucene3xSkipListReader((IndexInput)m_freqStream.Clone(), maxSkipLevels, skipInterval); // lazily clone
                }

                if (!haveSkipped) // lazily initialize skip stream
                {
                    skipListReader.Init(skipPointer, freqBasePointer, proxBasePointer, m_df, m_currentFieldStoresPayloads);
                    haveSkipped = true;
                }

                int newCount = skipListReader.SkipTo(target);
                if (newCount > m_count)
                {
                    m_freqStream.Seek(skipListReader.FreqPointer);
                    SkipProx(skipListReader.ProxPointer, skipListReader.PayloadLength);

                    doc     = skipListReader.Doc;
                    m_count = newCount;
                }
            }

            // done skipping, now just scan
            do
            {
                if (!Next())
                {
                    return false;
                }
            } while (target > doc);
            return true;
        }
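Note how the clone above is taken at most once per enum: skipListReader is created lazily behind the null check and afterwards only re-initialized, so repeated SkipTo calls never pay for another clone. A compressed sketch of that reuse (all names are illustrative):

    using System;

    public class FreqCursor
    {
        public long Position;
        public void Seek(long pos) { Position = pos; }
    }

    public class SkipState
    {
        private FreqCursor skipReader; // cloned lazily, then reused
        private bool haveSkipped;

        // cloneFreq captures something like () => (IndexInput)freqStream.Clone()
        public void EnsureSkipReader(Func<FreqCursor> cloneFreq, long skipPointer)
        {
            if (skipReader == null)
            {
                skipReader = cloneFreq();     // one clone for the enum's lifetime
            }
            if (!haveSkipped)
            {
                skipReader.Seek(skipPointer); // cheap per-term re-initialization
                haveSkipped = true;
            }
        }
    }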
Example #15
        /// <summary>Optimized implementation. </summary>
        public virtual bool SkipTo(int target)
        {
            if (df >= skipInterval)
            {
                // optimized case
                if (skipListReader == null)
                {
                    skipListReader = new DefaultSkipListReader((IndexInput)freqStream.Clone(), maxSkipLevels, skipInterval);                      // lazily clone
                }
                if (!haveSkipped)
                {
                    // lazily initialize skip stream
                    skipListReader.Init(skipPointer, freqBasePointer, proxBasePointer, df, currentFieldStoresPayloads);
                    haveSkipped = true;
                }

                int newCount = skipListReader.SkipTo(target);
                if (newCount > count)
                {
                    freqStream.Seek(skipListReader.GetFreqPointer());
                    SkipProx(skipListReader.GetProxPointer(), skipListReader.GetPayloadLength());

                    doc   = skipListReader.GetDoc();
                    count = newCount;
                }
            }

            // done skipping, now just scan
            do
            {
                if (!Next())
                {
                    return false;
                }
            } while (target > doc);
            return true;
        }
Example #16
            private int SkipTo(int target)
            {
                if ((target - outerInstance.skipInterval) >= m_accum && m_limit >= outerInstance.skipMinimum)
                {
                    // There are enough docs in the posting to have
                    // skip data, and it isn't too close.

                    if (skipper == null)
                    {
                        // this is the first time this enum has ever been used for skipping -- do lazy init
                        skipper = new Lucene40SkipListReader((IndexInput)freqIn.Clone(), outerInstance.maxSkipLevels, outerInstance.skipInterval);
                    }

                    if (!m_skipped)
                    {
                        // this is the first time this posting has
                        // skipped since reset() was called, so now we
                        // load the skip data for this posting

                        skipper.Init(m_freqOffset + m_skipOffset, m_freqOffset, 0, m_limit, m_storePayloads, m_storeOffsets);

                        m_skipped = true;
                    }

                    int newOrd = skipper.SkipTo(target);

                    if (newOrd > m_ord)
                    {
                        // Skipper moved

                        m_ord   = newOrd;
                        m_accum = skipper.Doc;
                        freqIn.Seek(skipper.FreqPointer);
                    }
                }
                return ScanTo(target);
            }
Example #17
            internal int SkipTo(int target)
            {
                if ((target - OuterInstance.SkipInterval) >= Accum && Limit >= OuterInstance.SkipMinimum)
                {
                    // There are enough docs in the posting to have
                    // skip data, and it isn't too close.

                    if (Skipper == null)
                    {
                        // this is the first time this enum has ever been used for skipping -- do lazy init
                        Skipper = new Lucene40SkipListReader((IndexInput)FreqIn.Clone(), OuterInstance.MaxSkipLevels, OuterInstance.SkipInterval);
                    }

                    if (!Skipped)
                    {
                        // this is the first time this posting has
                        // skipped since reset() was called, so now we
                        // load the skip data for this posting

                        Skipper.Init(FreqOffset + SkipOffset, FreqOffset, 0, Limit, StorePayloads, StoreOffsets);

                        Skipped = true;
                    }

                    int newOrd = Skipper.SkipTo(target);

                    if (newOrd > Ord)
                    {
                        // Skipper moved

                        Ord   = newOrd;
                        Accum = Skipper.Doc;
                        FreqIn.Seek(Skipper.FreqPointer);
                    }
                }
                return ScanTo(target);
            }
Example #18
        /// <summary>
        /// Optimized implementation. </summary>
        public virtual bool SkipTo(int target)
        {
            // don't skip if the target is close (within skipInterval docs away)
            if ((target - SkipInterval) >= Doc_Renamed && Df >= SkipInterval) // optimized case
            {
                if (SkipListReader == null)
                {
                    SkipListReader = new Lucene3xSkipListReader((IndexInput)FreqStream.Clone(), MaxSkipLevels, SkipInterval); // lazily clone
                }

                if (!HaveSkipped) // lazily initialize skip stream
                {
                    SkipListReader.Init(SkipPointer, FreqBasePointer, ProxBasePointer, Df, CurrentFieldStoresPayloads);
                    HaveSkipped = true;
                }

                int newCount = SkipListReader.SkipTo(target);
                if (newCount > Count)
                {
                    FreqStream.Seek(SkipListReader.FreqPointer);
                    SkipProx(SkipListReader.ProxPointer, SkipListReader.PayloadLength);

                    Doc_Renamed = SkipListReader.Doc;
                    Count       = newCount;
                }
            }

            // done skipping, now just scan
            do
            {
                if (!Next())
                {
                    return false;
                }
            } while (target > Doc_Renamed);
            return true;
        }
Example #19
        // It is not always necessary to move the prox pointer
        // to a new document after the freq pointer has been moved.
        // Consider for example a phrase query with two terms:
        // the freq pointer for term 1 has to move to document x
        // to answer the question of whether the term occurs in that
        // document. But the positions only have to be read if term 2
        // also matches document x, to figure out if term 1 and term 2
        // appear next to each other in document x and thus satisfy
        // the query. So we move the prox pointer lazily to the
        // document as soon as positions are requested.
        private void LazySkip()
        {
            if (proxStream == null)
            {
                // clone lazily
                proxStream = (IndexInput)proxStreamOrig.Clone();
            }

            // we might have to skip the current payload
            // if it was not read yet
            SkipPayload();

            if (lazySkipPointer != -1)
            {
                proxStream.Seek(lazySkipPointer);
                lazySkipPointer = -1;
            }

            if (lazySkipProxCount != 0)
            {
                SkipPositions(lazySkipProxCount);
                lazySkipProxCount = 0;
            }
        }
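The comment above is the clearest statement in this section of why these readers clone lazily: the prox stream costs a clone plus a seek, and neither is paid until positions are actually requested. The same deferral in miniature (LazyPositions and its members are illustrative names):

    using System;

    public class ProxCursor
    {
        public long Position;
        public void Seek(long pos) { Position = pos; }
    }

    public class LazyPositions
    {
        private readonly Func<ProxCursor> cloneOrig; // wraps proxStreamOrig.Clone()
        private ProxCursor prox;                     // stays null until needed
        private long lazySkipPointer = -1;           // deferred seek target

        public LazyPositions(Func<ProxCursor> cloneOrig) { this.cloneOrig = cloneOrig; }

        // record where positions would start, without touching the file
        public void ScheduleSeek(long pointer) { lazySkipPointer = pointer; }

        public ProxCursor Positions()
        {
            if (prox == null)
            {
                prox = cloneOrig();         // clone lazily, on first use only
            }
            if (lazySkipPointer != -1)
            {
                prox.Seek(lazySkipPointer); // apply the deferred seek
                lazySkipPointer = -1;
            }
            return prox;
        }
    }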
Example #20
        public virtual void TestDataInputOutput()
        {
            Random random = Random();

            for (int iter = 0; iter < 5 * RANDOM_MULTIPLIER; iter++)
            {
                BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("testOverflow"));
                if (dir is MockDirectoryWrapper)
                {
                    ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
                }
                int         blockBits = TestUtil.NextInt(random, 1, 20);
                int         blockSize = 1 << blockBits;
                PagedBytes  p         = new PagedBytes(blockBits);
                IndexOutput @out      = dir.CreateOutput("foo", IOContext.DEFAULT);
                int         numBytes  = TestUtil.NextInt(Random(), 2, 10000000);

                byte[] answer = new byte[numBytes];
                Random().NextBytes(answer);
                int written = 0;
                while (written < numBytes)
                {
                    if (Random().Next(10) == 7)
                    {
                        @out.WriteByte(answer[written++]);
                    }
                    else
                    {
                        int chunk = Math.Min(Random().Next(1000), numBytes - written);
                        @out.WriteBytes(answer, written, chunk);
                        written += chunk;
                    }
                }

                @out.Dispose();
                IndexInput input = dir.OpenInput("foo", IOContext.DEFAULT);
                DataInput  @in   = (DataInput)input.Clone();

                p.Copy(input, input.Length());
                PagedBytes.Reader reader = p.Freeze(random.NextBoolean());

                byte[] verify = new byte[numBytes];
                int    read   = 0;
                while (read < numBytes)
                {
                    if (Random().Next(10) == 7)
                    {
                        verify[read++] = @in.ReadByte();
                    }
                    else
                    {
                        int chunk = Math.Min(Random().Next(1000), numBytes - read);
                        @in.ReadBytes(verify, read, chunk);
                        read += chunk;
                    }
                }
                Assert.IsTrue(Arrays.Equals(answer, verify));

                BytesRef slice = new BytesRef();
                for (int iter2 = 0; iter2 < 100; iter2++)
                {
                    int pos = random.Next(numBytes - 1);
                    int len = random.Next(Math.Min(blockSize + 1, numBytes - pos));
                    reader.FillSlice(slice, pos, len);
                    for (int byteUpto = 0; byteUpto < len; byteUpto++)
                    {
                        Assert.AreEqual(answer[pos + byteUpto], (byte)slice.Bytes[slice.Offset + byteUpto]);
                    }
                }
                input.Dispose();
                dir.Dispose();
            }
        }
Example #21
        internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
        {
            bool success = false;

            isOriginal = true;
            try
            {
                fieldInfos = fn;

                cloneableFieldsStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize);
                cloneableIndexStream  = d.OpenInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize);

                // First version of fdx did not include a format
                // header, but, the first int will always be 0 in that
                // case
                int firstInt = cloneableIndexStream.ReadInt();
                format = firstInt; // 0 for the headerless first version, else the header value

                if (format > FieldsWriter.FORMAT_CURRENT)
                {
                    throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");
                }

                formatSize = format > FieldsWriter.FORMAT ? 4 : 0;

                if (format < FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
                {
                    cloneableFieldsStream.SetModifiedUTF8StringsMode();
                }

                fieldsStream = (IndexInput)cloneableFieldsStream.Clone();

                long indexSize = cloneableIndexStream.Length() - formatSize;

                if (docStoreOffset != -1)
                {
                    // We read only a slice out of this shared fields file
                    this.docStoreOffset = docStoreOffset;
                    this.size           = size;

                    // Verify the file is long enough to hold all of our
                    // docs
                    System.Diagnostics.Debug.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
                }
                else
                {
                    this.docStoreOffset = 0;
                    this.size           = (int)(indexSize >> 3);
                }

                indexStream  = (IndexInput)cloneableIndexStream.Clone();
                numTotalDocs = (int)(indexSize >> 3);
                success      = true;
            }
            finally
            {
                // With lock-less commits, it's entirely possible (and
                // fine) to hit a FileNotFound exception above. In
                // this case, we want to explicitly close any subset
                // of things that were opened so that we don't have to
                // wait for a GC to do so.
                if (!success)
                {
                    Dispose();
                }
            }
        }
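Two conventions from this constructor recur throughout these examples: a pair of "cloneable" master streams is opened once and every working stream is a clone of one of them, and a success flag checked in a finally block tears the half-built reader down if anything after OpenInput throws. In outline, against plain file APIs rather than a Lucene.NET Directory:

    using System;
    using System.IO;

    // Sketch of the open-clone-or-dispose convention above.
    public class CloningReader : IDisposable
    {
        private FileStream master;

        public CloningReader(string path)
        {
            bool success = false;
            try
            {
                master = File.OpenRead(path);
                // ... read and validate a header here; any throw is covered below ...
                success = true;
            }
            finally
            {
                if (!success)
                {
                    Dispose(); // don't leak the handle while the exception unwinds
                }
            }
        }

        // each consumer gets an independent position over the same file
        public FileStream OpenClone()
        {
            return File.OpenRead(master.Name);
        }

        public void Dispose()
        {
            master?.Dispose();
        }
    }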
Example #22
 public override Reader GetReader()
 {
     return new MockReader((IndexInput)@in.Clone());
 }
Example #23
        public virtual void TestRandomAccessClones()
        {
            SetUp_2();
            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random), false);

            // Open two files
            IndexInput e1 = cr.OpenInput("f11", NewIOContext(Random));
            IndexInput e2 = cr.OpenInput("f3", NewIOContext(Random));

            IndexInput a1 = (IndexInput)e1.Clone();
            IndexInput a2 = (IndexInput)e2.Clone();

            // Seek the first pair
            e1.Seek(100);
            a1.Seek(100);
            Assert.AreEqual(100, e1.GetFilePointer());
            Assert.AreEqual(100, a1.GetFilePointer());
            byte be1 = e1.ReadByte();
            byte ba1 = a1.ReadByte();

            Assert.AreEqual(be1, ba1);

            // Now seek the second pair
            e2.Seek(1027);
            a2.Seek(1027);
            Assert.AreEqual(1027, e2.GetFilePointer());
            Assert.AreEqual(1027, a2.GetFilePointer());
            byte be2 = e2.ReadByte();
            byte ba2 = a2.ReadByte();

            Assert.AreEqual(be2, ba2);

            // Now make sure the first one didn't move
            Assert.AreEqual(101, e1.GetFilePointer());
            Assert.AreEqual(101, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            // Now move the first one again, past the buffer length
            e1.Seek(1910);
            a1.Seek(1910);
            Assert.AreEqual(1910, e1.GetFilePointer());
            Assert.AreEqual(1910, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            // Now make sure the second set didn't move
            Assert.AreEqual(1028, e2.GetFilePointer());
            Assert.AreEqual(1028, a2.GetFilePointer());
            be2 = e2.ReadByte();
            ba2 = a2.ReadByte();
            Assert.AreEqual(be2, ba2);

            // Move the second set back, again cross the buffer size
            e2.Seek(17);
            a2.Seek(17);
            Assert.AreEqual(17, e2.GetFilePointer());
            Assert.AreEqual(17, a2.GetFilePointer());
            be2 = e2.ReadByte();
            ba2 = a2.ReadByte();
            Assert.AreEqual(be2, ba2);

            // Finally, make sure the first set didn't move
            Assert.AreEqual(1911, e1.GetFilePointer());
            Assert.AreEqual(1911, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            e1.Dispose();
            e2.Dispose();
            a1.Dispose();
            a2.Dispose();
            cr.Dispose();
        }
Example #24
 // NOTE: tvf is pre-positioned by caller
 public TVTermsEnum(Lucene40TermVectorsReader outerInstance)
 {
     this.OuterInstance = outerInstance;
     this.OrigTVF = outerInstance.Tvf;
     Tvf = (IndexInput)OrigTVF.Clone();
 }
Example #25
 // TODO: is it intentional that clone doesn't wrap?
 public override object Clone()
 {
     return ii.Clone();
 }
Example #26
 /// <summary>
 /// Returns a cloned FieldsReader that shares open
 ///  IndexInputs with the original one.  It is the caller's
 ///  job not to close the original FieldsReader until all
 ///  clones are called (eg, currently SegmentReader manages
 ///  this logic).
 /// </summary>
 public override object Clone()
 {
     EnsureOpen();
     return new Lucene40StoredFieldsReader(FieldInfos, NumTotalDocs, Size_Renamed, (IndexInput)FieldsStream.Clone(), (IndexInput)IndexStream.Clone());
 }
Example #27
 // NOTE: tvf is pre-positioned by caller
 public TVTermsEnum(Lucene3xTermVectorsReader outerInstance)
 {
     this.OuterInstance = outerInstance;
     this.OrigTVF       = outerInstance.Tvf;
     Tvf = (IndexInput)OrigTVF.Clone();
 }
Example #28
 internal CSIndexInput(IndexInput @base, long fileOffset, long length, int readBufferSize) : base(readBufferSize)
 {
     this.base_Renamed = (IndexInput)@base.Clone();
     this.fileOffset   = fileOffset;
     this.length       = length;
 }
Example #29
 public SimpleTextDocsAndPositionsEnum(SimpleTextFieldsReader outerInstance)
 {
     _inStart = outerInstance._input;
     _in      = (IndexInput)_inStart.Clone();
 }
Example #30
 /// <summary>
 /// Returns a cloned FieldsReader that shares open
 /// IndexInputs with the original one.  It is the caller's
 /// job not to close the original FieldsReader until all
 /// clones are called (eg, currently SegmentReader manages
 /// this logic).
 /// </summary>
 public override object Clone()
 {
     EnsureOpen();
     return new Lucene3xStoredFieldsReader(fieldInfos, numTotalDocs, size, format, docStoreOffset, (IndexInput)fieldsStream.Clone(), (IndexInput)indexStream.Clone());
 }
Example #31
            //private boolean DEBUG;

            internal FieldReader(BlockTreeTermsReader outerInstance, FieldInfo fieldInfo, long numTerms, BytesRef rootCode, long sumTotalTermFreq, long sumDocFreq, int docCount, long indexStartFP, int longsSize, IndexInput indexIn)
            {
                this.OuterInstance = outerInstance;
                Debug.Assert(numTerms > 0);
                this.fieldInfo = fieldInfo;
                //DEBUG = BlockTreeTermsReader.DEBUG && fieldInfo.name.equals("id");
                this.NumTerms = numTerms;
                this.SumTotalTermFreq_Renamed = sumTotalTermFreq;
                this.SumDocFreq_Renamed = sumDocFreq;
                this.DocCount_Renamed = docCount;
                this.IndexStartFP = indexStartFP;
                this.RootCode = rootCode;
                this.LongsSize = longsSize;
                // if (DEBUG) {
                //   System.out.println("BTTR: seg=" + segment + " field=" + fieldInfo.name + " rootBlockCode=" + rootCode + " divisor=" + indexDivisor);
                // }

                RootBlockFP = (int)((uint)(new ByteArrayDataInput((byte[])(Array)rootCode.Bytes, rootCode.Offset, rootCode.Length)).ReadVLong() >> BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS);

                if (indexIn != null)
                {
                    IndexInput clone = (IndexInput)indexIn.Clone();
                    //System.out.println("start=" + indexStartFP + " field=" + fieldInfo.name);
                    clone.Seek(indexStartFP);
                    Index = new FST<BytesRef>(clone, ByteSequenceOutputs.Singleton);

                    /*
                    if (false) {
                      final String dotFileName = segment + "_" + fieldInfo.name + ".dot";
                      Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName));
                      Util.toDot(index, w, false, false);
                      System.out.println("FST INDEX: SAVED to " + dotFileName);
                      w.close();
                    }
                    */
                }
                else
                {
                    Index = null;
                }
            }
Example #32
        public override int Merge(MergeState mergeState)
        {
            int docCount = 0;
            int idx      = 0;

            foreach (AtomicReader reader in mergeState.Readers)
            {
                SegmentReader matchingSegmentReader = mergeState.MatchingSegmentReaders[idx++];
                CompressingTermVectorsReader matchingVectorsReader = null;
                if (matchingSegmentReader != null)
                {
                    TermVectorsReader vectorsReader = matchingSegmentReader.TermVectorsReader;
                    // we can only bulk-copy if the matching reader is also a CompressingTermVectorsReader
                    if (vectorsReader != null && vectorsReader is CompressingTermVectorsReader)
                    {
                        matchingVectorsReader = (CompressingTermVectorsReader)vectorsReader;
                    }
                }

                int  maxDoc   = reader.MaxDoc;
                Bits liveDocs = reader.LiveDocs;

                if (matchingVectorsReader == null || matchingVectorsReader.Version != VERSION_CURRENT || matchingVectorsReader.CompressionMode != CompressionMode || matchingVectorsReader.ChunkSize != ChunkSize || matchingVectorsReader.PackedIntsVersion != PackedInts.VERSION_CURRENT)
                {
                    // naive merge...
                    for (int i = NextLiveDoc(0, liveDocs, maxDoc); i < maxDoc; i = NextLiveDoc(i + 1, liveDocs, maxDoc))
                    {
                        Fields vectors = reader.GetTermVectors(i);
                        AddAllDocVectors(vectors, mergeState);
                        ++docCount;
                        mergeState.checkAbort.Work(300);
                    }
                }
                else
                {
                    CompressingStoredFieldsIndexReader index = matchingVectorsReader.Index;
                    IndexInput vectorsStreamOrig             = matchingVectorsReader.VectorsStream;
                    vectorsStreamOrig.Seek(0);
                    ChecksumIndexInput vectorsStream = new BufferedChecksumIndexInput((IndexInput)vectorsStreamOrig.Clone());

                    for (int i = NextLiveDoc(0, liveDocs, maxDoc); i < maxDoc;)
                    {
                        // We make sure to move the checksum input in any case, otherwise the final
                        // integrity check might need to read the whole file a second time
                        long startPointer = index.GetStartPointer(i);
                        if (startPointer > vectorsStream.FilePointer)
                        {
                            vectorsStream.Seek(startPointer);
                        }
                        if ((PendingDocs.Count == 0) && (i == 0 || index.GetStartPointer(i - 1) < startPointer)) // start of a chunk
                        {
                            int docBase   = vectorsStream.ReadVInt();
                            int chunkDocs = vectorsStream.ReadVInt();
                            Debug.Assert(docBase + chunkDocs <= matchingSegmentReader.MaxDoc);
                            if (docBase + chunkDocs < matchingSegmentReader.MaxDoc && NextDeletedDoc(docBase, liveDocs, docBase + chunkDocs) == docBase + chunkDocs)
                            {
                                long chunkEnd    = index.GetStartPointer(docBase + chunkDocs);
                                long chunkLength = chunkEnd - vectorsStream.FilePointer;
                                IndexWriter.WriteIndex(chunkDocs, this.VectorsStream.FilePointer);
                                this.VectorsStream.WriteVInt(docCount);
                                this.VectorsStream.WriteVInt(chunkDocs);
                                this.VectorsStream.CopyBytes(vectorsStream, chunkLength);
                                docCount     += chunkDocs;
                                this.NumDocs += chunkDocs;
                                mergeState.checkAbort.Work(300 * chunkDocs);
                                i = NextLiveDoc(docBase + chunkDocs, liveDocs, maxDoc);
                            }
                            else
                            {
                                for (; i < docBase + chunkDocs; i = NextLiveDoc(i + 1, liveDocs, maxDoc))
                                {
                                    Fields vectors = reader.GetTermVectors(i);
                                    AddAllDocVectors(vectors, mergeState);
                                    ++docCount;
                                    mergeState.checkAbort.Work(300);
                                }
                            }
                        }
                        else
                        {
                            Fields vectors = reader.GetTermVectors(i);
                            AddAllDocVectors(vectors, mergeState);
                            ++docCount;
                            mergeState.checkAbort.Work(300);
                            i = NextLiveDoc(i + 1, liveDocs, maxDoc);
                        }
                    }

                    vectorsStream.Seek(vectorsStream.Length() - CodecUtil.FooterLength());
                    CodecUtil.CheckFooter(vectorsStream);
                }
            }
            Finish(mergeState.FieldInfos, docCount);
            return docCount;
        }
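The merge-side detail worth isolating: the matching reader's vectors stream is cloned and then wrapped in a checksumming input, so the bulk copy and the final footer verification run off a private cursor whose checksum accumulates as the copy proceeds, instead of re-reading the whole file at the end. A toy version of such a wrapper (the real code uses BufferedChecksumIndexInput over a CRC32; this sketch is not that API):

    using System;

    // Toy checksumming wrapper over a cloned stream: every byte read through
    // it feeds a running checksum, so by the time the footer is reached the
    // checksum is already complete and nothing has to be read twice.
    public class ChecksumReader
    {
        private readonly Func<byte> readByte; // wraps clone.ReadByte
        private uint sum;

        public ChecksumReader(Func<byte> readByte) { this.readByte = readByte; }

        public byte ReadByte()
        {
            byte b = readByte();
            sum = sum * 31 + b; // toy rolling checksum; Lucene uses CRC32
            return b;
        }

        public uint Checksum { get { return sum; } }
    }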
Example #33
 public SimpleTextDocsAndPositionsEnum(SimpleTextFieldsReader outerInstance)
 {
     this.outerInstance = outerInstance;
     this.inStart       = outerInstance._input;
     this.@in           = (IndexInput)inStart.Clone();
 }
Example #34
 internal SegmentDocsEnumBase(Lucene40PostingsReader outerInstance, IndexInput startFreqIn, Bits liveDocs)
 {
     this.OuterInstance = outerInstance;
     this.StartFreqIn = startFreqIn;
     this.FreqIn = (IndexInput)startFreqIn.Clone();
     this.LiveDocs = liveDocs;
 }
Example #35
 public SegmentDocsAndPositionsEnum(Lucene40PostingsReader outerInstance, IndexInput freqIn, IndexInput proxIn)
 {
     this.OuterInstance = outerInstance;
     StartFreqIn = freqIn;
     this.FreqIn = (IndexInput)freqIn.Clone();
     this.ProxIn = (IndexInput)proxIn.Clone();
 }
Example #36
 // NOTE: tvf is pre-positioned by caller
 public TVTermsEnum(Lucene40TermVectorsReader outerInstance)
 {
     this.outerInstance = outerInstance;
     this.origTVF       = outerInstance.tvf;
     tvf = (IndexInput)origTVF.Clone();
 }
 /// <summary>
 /// Returns a cloned FieldsReader that shares open
 ///  IndexInputs with the original one.  It is the caller's
 ///  job not to close the original FieldsReader until all
 ///  clones are called (eg, currently SegmentReader manages
 ///  this logic).
 /// </summary>
 public override object Clone()
 {
     EnsureOpen();
     return new Lucene3xStoredFieldsReader(FieldInfos, NumTotalDocs, Size, Format, DocStoreOffset, (IndexInput)FieldsStream.Clone(), (IndexInput)IndexStream.Clone());
 }