/// <summary>
/// Creates a term-docs scanner over a private clone of the frequency stream so this
/// instance can seek without disturbing the shared stream's position.
/// </summary>
public SegmentTermDocs(IndexInput freqStream, TermInfosReader tis, FieldInfos fieldInfos)
{
    this.Tis = tis;
    this.FieldInfos = fieldInfos;
    // Clone: each SegmentTermDocs needs an independent file pointer.
    this.FreqStream = (IndexInput) freqStream.Clone();
    // Cache skip parameters from the terms reader.
    SkipInterval = tis.SkipInterval;
    MaxSkipLevels = tis.MaxSkipLevels;
}
/// <summary>
/// Deep-copies this compound-file slice: the wrapped stream is cloned as well, so the
/// copy's file pointer moves independently of this instance's.
/// </summary>
public override System.Object Clone()
{
    CSIndexInput copy = (CSIndexInput) base.Clone();
    // Without its own clone of the underlying stream, both copies would
    // share a single position and corrupt each other's reads.
    copy.base_Renamed = (IndexInput) base_Renamed.Clone();
    copy.fileOffset = fileOffset;
    copy.length = length;
    return copy;
}
/// <summary>
/// Opens the stored-fields data (".fdt") and index (".fdx") files for a segment.
/// </summary>
public FieldsReader(Directory d, System.String segment, FieldInfos fn)
{
    fieldInfos = fn;
    cloneableFieldsStream = d.OpenInput(segment + ".fdt");
    // Read through a clone so holders of the cloneable stream keep their own position.
    fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
    indexStream = d.OpenInput(segment + ".fdx");
    // One 8-byte pointer per document in the index file, so length/8 = doc count.
    size = (int) (indexStream.Length() / 8);
}
/// <summary>
/// Opens a segment's stored-fields files: ".fdt" holds field data, ".fdx" the
/// per-document pointers into it.
/// </summary>
public FieldsReader(Directory d, System.String segment, FieldInfos fn)
{
    fieldInfos = fn;

    cloneableFieldsStream = d.OpenInput(segment + ".fdt");
    indexStream = d.OpenInput(segment + ".fdx");

    // This reader works against its own clone; the cloneable original is
    // kept so further clones can be made later without reopening the file.
    fieldsStream = (IndexInput) cloneableFieldsStream.Clone();

    // Index entries are 8 bytes each, so the entry count is length/8.
    size = (int) (indexStream.Length() / 8);
}
/// <summary>
/// Verifies close semantics of compound-file stream clones: Close() on a clone is a
/// no-op while the owning CompoundFileReader stays open, and closing the reader
/// closes every stream obtained from it (original and clones alike).
/// </summary>
public virtual void TestClonedStreamsClosing()
{
    SetUp_2();
    CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");

    // basic clone
    IndexInput expected = dir.OpenInput("f11");

    // this test only works for FSIndexInput
    Assert.IsTrue(_TestHelper.IsSimpleFSIndexInput(expected));
    Assert.IsTrue(_TestHelper.IsSimpleFSIndexInputOpen(expected));

    IndexInput one = cr.OpenInput("f11");
    Assert.IsTrue(IsCSIndexInputOpen(one));

    IndexInput two = (IndexInput) one.Clone();
    Assert.IsTrue(IsCSIndexInputOpen(two));

    // Both the compound stream and its clone must match the raw file contents.
    AssertSameStreams("basic clone one", expected, one);
    expected.Seek(0);
    AssertSameStreams("basic clone two", expected, two);

    // Now close the first stream
    one.Close();
    Assert.IsTrue(IsCSIndexInputOpen(one), "Only close when cr is closed");

    // The following should really fail since we couldn't expect to
    // access a file once close has been called on it (regardless of
    // buffering and/or clone magic)
    expected.Seek(0);
    two.Seek(0);
    AssertSameStreams("basic clone two/2", expected, two);

    // Now close the compound reader
    cr.Close();
    Assert.IsFalse(IsCSIndexInputOpen(one), "Now closed one");
    Assert.IsFalse(IsCSIndexInputOpen(two), "Now closed two");

    // The following may also fail since the compound stream is closed
    expected.Seek(0);
    two.Seek(0);
    //assertSameStreams("basic clone two/3", expected, two);

    // Now close the second clone
    two.Close();
    expected.Seek(0);
    two.Seek(0);
    //assertSameStreams("basic clone two/4", expected, two);

    expected.Close();
}
/// <summary>
/// Used only by clone: copies the scalar state, shares the cloneable streams with the
/// source reader, and takes private stream clones for this instance's own reads.
/// </summary>
private FieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, int format, int formatSize, int docStoreOffset, IndexInput cloneableFieldsStream, IndexInput cloneableIndexStream)
{
    this.fieldInfos = fieldInfos;
    this.numTotalDocs = numTotalDocs;
    this.size = size;
    this.format = format;
    this.formatSize = formatSize;
    this.docStoreOffset = docStoreOffset;

    // The cloneable streams are shared across clones; each reader then
    // derives its own positioned streams from them.
    this.cloneableFieldsStream = cloneableFieldsStream;
    this.cloneableIndexStream = cloneableIndexStream;
    fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
    indexStream = (IndexInput) cloneableIndexStream.Clone();
}
/// <summary>
/// Clone-only constructor: transfers precomputed metadata and the shared cloneable
/// streams, then gives this reader independently seekable stream clones.
/// </summary>
private FieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, int format, int formatSize, int docStoreOffset, IndexInput cloneableFieldsStream, IndexInput cloneableIndexStream)
{
    // Scalar metadata copied verbatim from the source reader.
    this.fieldInfos = fieldInfos;
    this.numTotalDocs = numTotalDocs;
    this.size = size;
    this.format = format;
    this.formatSize = formatSize;
    this.docStoreOffset = docStoreOffset;
    this.cloneableFieldsStream = cloneableFieldsStream;
    this.cloneableIndexStream = cloneableIndexStream;

    // Private clones so this reader's file pointers do not interfere with others.
    fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
    indexStream = (IndexInput) cloneableIndexStream.Clone();
}
/// <summary>
/// Shallow-copies this reader and then deep-clones the three term-vector streams
/// (tvx/tvd/tvf) so the copy can seek independently.
/// </summary>
public virtual System.Object Clone()
{
    TermVectorsReader copy = (TermVectorsReader) base.MemberwiseClone();

    // These are null when a TermVectorsReader was created
    // on a segment that did not have term vectors saved
    bool hasVectors = tvx != null && tvd != null && tvf != null;
    if (hasVectors)
    {
        copy.tvx = (IndexInput) tvx.Clone();
        copy.tvd = (IndexInput) tvd.Clone();
        copy.tvf = (IndexInput) tvf.Clone();
    }
    return copy;
}
/// <summary>
/// Opens a segment's stored-fields files. When docStoreOffset is not -1, the files are
/// a shared doc store and only the slice starting at docStoreOffset with 'size' docs
/// is exposed by this reader.
/// </summary>
internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
{
    bool opened = false;
    try
    {
        fieldInfos = fn;
        cloneableFieldsStream = d.OpenInput(segment + ".fdt", readBufferSize);
        // Read through a clone so the cloneable stream keeps an untouched position.
        fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
        indexStream = d.OpenInput(segment + ".fdx", readBufferSize);

        if (docStoreOffset == -1)
        {
            this.docStoreOffset = 0;
            this.size = (int) (indexStream.Length() >> 3);
        }
        else
        {
            // We read only a slice out of this shared fields file
            this.docStoreOffset = docStoreOffset;
            this.size = size;

            // Verify the file is long enough to hold all of our docs
            System.Diagnostics.Debug.Assert(((int) (indexStream.Length() / 8)) >= size + this.docStoreOffset);
        }

        numTotalDocs = (int) (indexStream.Length() >> 3);
        opened = true;
    }
    finally
    {
        // With lock-less commits it is fine to hit a FileNotFound exception
        // above; explicitly close whatever was opened rather than waiting
        // for the GC to do so.
        if (!opened)
        {
            Close();
        }
    }
}
/// <summary>
/// Opens the ".fdt"/".fdx" stored-fields files for a segment. A docStoreOffset other
/// than -1 means the files are a shared doc store and only a slice is visible here.
/// </summary>
internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
{
    bool ok = false;
    try
    {
        fieldInfos = fn;
        cloneableFieldsStream = d.OpenInput(segment + ".fdt", readBufferSize);
        fieldsStream = (IndexInput) cloneableFieldsStream.Clone();
        indexStream = d.OpenInput(segment + ".fdx", readBufferSize);

        bool isSlice = docStoreOffset != -1;
        if (isSlice)
        {
            // We read only a slice out of this shared fields file
            this.docStoreOffset = docStoreOffset;
            this.size = size;

            // Verify the file is long enough to hold all of our docs
            System.Diagnostics.Debug.Assert(((int) (indexStream.Length() / 8)) >= size + this.docStoreOffset);
        }
        else
        {
            this.docStoreOffset = 0;
            this.size = (int) (indexStream.Length() >> 3);
        }

        numTotalDocs = (int) (indexStream.Length() >> 3);
        ok = true;
    }
    finally
    {
        // Lock-less commits can legitimately throw FileNotFound above;
        // release whatever streams were opened instead of waiting for a GC.
        if (!ok)
        {
            Close();
        }
    }
}
/// <summary>
/// Returns a copy of this enum whose stream and term buffers are independent of
/// this instance, so both can be advanced separately.
/// </summary>
public object Clone()
{
    // MemberwiseClone on a reference type does not throw, so the previous
    // catch-and-ignore block could only have masked an error and then failed
    // later with a NullReferenceException on the null 'clone'; clone directly.
    SegmentTermEnum clone = (SegmentTermEnum) base.MemberwiseClone();

    // Deep-copy the stream and buffers so the clone seeks/scans independently.
    clone.input = (IndexInput) input.Clone();
    clone.termInfo = new TermInfo(termInfo);
    clone.termBuffer = (TermBuffer) termBuffer.Clone();
    clone.prevBuffer = (TermBuffer) prevBuffer.Clone();
    // The scan buffer holds only transient scratch state; start it fresh.
    clone.scanBuffer = new TermBuffer();

    return clone;
}
/// <summary>
/// Returns a copy of this reader with its own clones of the term-vector streams, or
/// null when the segment was written without term vectors (all streams null).
/// </summary>
public virtual System.Object Clone()
{
    // A reader created on a segment without term vectors has nothing to clone.
    if (tvx == null || tvd == null || tvf == null)
    {
        return null;
    }

    // MemberwiseClone never throws for reference types; the old empty catch
    // block would have hidden any failure and then crashed with a
    // NullReferenceException on the null 'clone' below, so it was removed.
    TermVectorsReader clone = (TermVectorsReader) base.MemberwiseClone();

    // Deep-clone the streams so the copy's file pointers are independent.
    clone.tvx = (IndexInput) tvx.Clone();
    clone.tvd = (IndexInput) tvd.Clone();
    clone.tvf = (IndexInput) tvf.Clone();

    return clone;
}
/// <summary>Optimized implementation: uses the multi-level skip list when enough
/// documents have been seen, then scans linearly to the target. Returns false when
/// the enumeration is exhausted before reaching <paramref name="target"/>.</summary>
public virtual bool SkipTo(int target)
{
    if (df >= skipInterval)
    {
        // optimized case
        if (skipListReader == null)
        {
            // lazily clone the freq stream for skip-list reads
            skipListReader = new DefaultSkipListReader((IndexInput) freqStream.Clone(), maxSkipLevels, skipInterval);
        }

        if (!haveSkipped)
        {
            // lazily initialize skip stream
            skipListReader.Init(skipPointer, freqBasePointer, proxBasePointer, df, currentFieldStoresPayloads);
            haveSkipped = true;
        }

        int skippedTo = skipListReader.SkipTo(target);
        if (skippedTo > count)
        {
            // The skip list moved us forward: reposition the freq and prox
            // streams and adopt the skip entry's doc/count.
            freqStream.Seek(skipListReader.GetFreqPointer());
            SkipProx(skipListReader.GetProxPointer(), skipListReader.GetPayloadLength());
            doc = skipListReader.GetDoc();
            count = skippedTo;
        }
    }

    // done skipping, now just scan
    while (true)
    {
        if (!Next())
        {
            return false;
        }
        if (doc >= target)
        {
            return true;
        }
    }
}
/// <summary>
/// Wraps a slice [fileOffset, fileOffset + length) of the compound file stream,
/// cloning the base stream so this slice positions itself independently.
/// </summary>
internal CSIndexInput(IndexInput base_Renamed, long fileOffset, long length, int readBufferSize) : base(readBufferSize)
{
    // Clone: each slice needs its own file pointer into the compound file.
    this.base_Renamed = (IndexInput) base_Renamed.Clone();
    this.fileOffset = fileOffset;
    this.length = length;
}
/// <summary>
/// Verifies that clones of compound-file streams have fully independent file
/// pointers: interleaved seeks and reads on two stream pairs (and seeks that cross
/// the read-buffer boundary) must never disturb each other's positions.
/// </summary>
public virtual void TestRandomAccessClones()
{
    SetUp_2();
    CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");

    // Open two files
    IndexInput e1 = cr.OpenInput("f11");
    IndexInput e2 = cr.OpenInput("f3");

    IndexInput a1 = (IndexInput) e1.Clone();
    IndexInput a2 = (IndexInput) e2.Clone();

    // Seek the first pair
    e1.Seek(100);
    a1.Seek(100);
    Assert.AreEqual(100, e1.GetFilePointer());
    Assert.AreEqual(100, a1.GetFilePointer());
    byte be1 = e1.ReadByte();
    byte ba1 = a1.ReadByte();
    Assert.AreEqual(be1, ba1);

    // Now seek the second pair
    e2.Seek(1027);
    a2.Seek(1027);
    Assert.AreEqual(1027, e2.GetFilePointer());
    Assert.AreEqual(1027, a2.GetFilePointer());
    byte be2 = e2.ReadByte();
    byte ba2 = a2.ReadByte();
    Assert.AreEqual(be2, ba2);

    // Now make sure the first one didn't move
    Assert.AreEqual(101, e1.GetFilePointer());
    Assert.AreEqual(101, a1.GetFilePointer());
    be1 = e1.ReadByte();
    ba1 = a1.ReadByte();
    Assert.AreEqual(be1, ba1);

    // Now more the first one again, past the buffer length
    e1.Seek(1910);
    a1.Seek(1910);
    Assert.AreEqual(1910, e1.GetFilePointer());
    Assert.AreEqual(1910, a1.GetFilePointer());
    be1 = e1.ReadByte();
    ba1 = a1.ReadByte();
    Assert.AreEqual(be1, ba1);

    // Now make sure the second set didn't move
    Assert.AreEqual(1028, e2.GetFilePointer());
    Assert.AreEqual(1028, a2.GetFilePointer());
    be2 = e2.ReadByte();
    ba2 = a2.ReadByte();
    Assert.AreEqual(be2, ba2);

    // Move the second set back, again cross the buffer size
    e2.Seek(17);
    a2.Seek(17);
    Assert.AreEqual(17, e2.GetFilePointer());
    Assert.AreEqual(17, a2.GetFilePointer());
    be2 = e2.ReadByte();
    ba2 = a2.ReadByte();
    Assert.AreEqual(be2, ba2);

    // Finally, make sure the first set didn't move
    // Now make sure the first one didn't move
    Assert.AreEqual(1911, e1.GetFilePointer());
    Assert.AreEqual(1911, a1.GetFilePointer());
    be1 = e1.ReadByte();
    ba1 = a1.ReadByte();
    Assert.AreEqual(be1, ba1);

    e1.Close();
    e2.Close();
    a1.Close();
    a2.Close();
    cr.Close();
}
/// <summary>
/// Builds a compound-file slice of the given length starting at fileOffset. The base
/// stream is cloned so each slice maintains a private position.
/// </summary>
internal CSIndexInput(IndexInput base_Renamed, long fileOffset, long length, int readBufferSize)
    : base(readBufferSize)
{
    this.fileOffset = fileOffset;
    this.length = length;
    // Cloned so concurrent slices over the same compound file don't share a pointer.
    this.base_Renamed = (IndexInput) base_Renamed.Clone();
}
/// <summary>Optimized implementation. Uses the single-level skip data embedded in the
/// freq stream (when df is large enough) to jump close to <c>target</c>, then scans
/// forward with Next(). Returns false if the enumeration ends before target.</summary>
public virtual bool SkipTo(int target)
{
    if (df >= skipInterval)
    {
        // optimized case
        if (skipStream == null)
        {
            skipStream = (IndexInput) freqStream.Clone(); // lazily clone
        }

        if (!haveSkipped)
        {
            // lazily seek skip stream
            skipStream.Seek(skipPointer);
            haveSkipped = true;
        }

        // scan skip data
        // Track the last skip entry at-or-before target; lastFreqPointer starts at
        // the current stream position so a no-progress scan leaves us in place.
        int lastSkipDoc = skipDoc;
        long lastFreqPointer = freqStream.GetFilePointer();
        long lastProxPointer = -1;
        // NOTE(review): numSkipped starts at -1 - (count % skipInterval), apparently to
        // align the running count to a skip-interval boundary — confirm against the
        // matching on-disk skip format before changing.
        int numSkipped = -1 - (count % skipInterval);

        while (target > skipDoc)
        {
            lastSkipDoc = skipDoc;
            lastFreqPointer = freqPointer;
            lastProxPointer = proxPointer;

            if (skipDoc != 0 && skipDoc >= doc)
            {
                numSkipped += skipInterval;
            }

            if (skipCount >= numSkips)
            {
                break;  // exhausted the skip entries for this term
            }

            // Each skip entry stores deltas for doc, freq pointer and prox pointer.
            skipDoc += skipStream.ReadVInt();
            freqPointer += skipStream.ReadVInt();
            proxPointer += skipStream.ReadVInt();

            skipCount++;
        }

        // if we found something to skip, then skip it
        if (lastFreqPointer > freqStream.GetFilePointer())
        {
            freqStream.Seek(lastFreqPointer);
            SkipProx(lastProxPointer);

            doc = lastSkipDoc;
            count += numSkipped;
        }
    }

    // done skipping, now just scan
    do
    {
        if (!Next())
        {
            return false;
        }
    }
    while (target > doc);
    return true;
}
/// <summary>
/// Produces a new FaultyIndexInput wrapping an independent clone of the delegate stream.
/// </summary>
public override System.Object Clone()
{
    IndexInput clonedDelegate = (IndexInput) delegate_Renamed.Clone();
    return new FaultyIndexInput(clonedDelegate);
}
/// <summary>
/// Opens the stored-fields data and index files for <c>segment</c> and validates the
/// on-disk format version. When <c>docStoreOffset</c> is not -1 the files are a shared
/// doc store and only the slice starting at that offset with <c>size</c> docs is
/// exposed. Statement order matters: <c>ReadInt</c> consumes the index stream header
/// before the stream is cloned.
/// </summary>
internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
{
    bool success = false;
    isOriginal = true;
    try
    {
        fieldInfos = fn;
        cloneableFieldsStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize);
        cloneableIndexStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize);

        // First version of fdx did not include a format
        // header, but, the first int will always be 0 in that
        // case
        int firstInt = cloneableIndexStream.ReadInt();
        format = firstInt == 0 ? 0 : firstInt;

        if (format > FieldsWriter.FORMAT_CURRENT)
            throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");

        // Only formats newer than FieldsWriter.FORMAT carry the 4-byte header.
        formatSize = format > FieldsWriter.FORMAT ? 4 : 0;

        // Older formats stored strings as modified UTF-8 (Java style).
        if (format < FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
            cloneableFieldsStream.SetModifiedUTF8StringsMode();

        // Read through a private clone so the cloneable stream keeps its position.
        fieldsStream = (IndexInput) cloneableFieldsStream.Clone();

        long indexSize = cloneableIndexStream.Length() - formatSize;

        if (docStoreOffset != - 1)
        {
            // We read only a slice out of this shared fields file
            this.docStoreOffset = docStoreOffset;
            this.size = size;

            // Verify the file is long enough to hold all of our
            // docs
            System.Diagnostics.Debug.Assert(((int) (indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
        }
        else
        {
            this.docStoreOffset = 0;
            this.size = (int) (indexSize >> 3); // 8 bytes per index entry
        }

        indexStream = (IndexInput) cloneableIndexStream.Clone();
        numTotalDocs = (int) (indexSize >> 3);
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
        {
            Dispose();
        }
    }
}
/// <summary>
/// Opens a segment's stored-fields files and checks the format version written in the
/// index-file header. A <c>docStoreOffset</c> other than -1 selects a slice of a
/// shared doc store. Note the ordering constraint: <c>ReadInt</c> consumes the header
/// from <c>cloneableIndexStream</c> before any clone of it is taken.
/// </summary>
internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size)
{
    bool success = false;
    isOriginal = true;
    try
    {
        fieldInfos = fn;
        cloneableFieldsStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize);
        cloneableIndexStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize);

        // First version of fdx did not include a format
        // header, but, the first int will always be 0 in that
        // case
        int firstInt = cloneableIndexStream.ReadInt();
        format = firstInt == 0 ? 0 : firstInt;

        if (format > FieldsWriter.FORMAT_CURRENT)
        {
            throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");
        }

        // Formats newer than FieldsWriter.FORMAT reserve 4 header bytes in the fdx file.
        formatSize = format > FieldsWriter.FORMAT ? 4 : 0;

        // Pre-UTF8-length formats stored strings as modified UTF-8.
        if (format < FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
        {
            cloneableFieldsStream.SetModifiedUTF8StringsMode();
        }

        // This reader works against its own clone of the fields stream.
        fieldsStream = (IndexInput)cloneableFieldsStream.Clone();

        long indexSize = cloneableIndexStream.Length() - formatSize;

        if (docStoreOffset != -1)
        {
            // We read only a slice out of this shared fields file
            this.docStoreOffset = docStoreOffset;
            this.size = size;

            // Verify the file is long enough to hold all of our
            // docs
            System.Diagnostics.Debug.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
        }
        else
        {
            this.docStoreOffset = 0;
            this.size = (int)(indexSize >> 3); // 8 bytes per index entry
        }

        indexStream = (IndexInput)cloneableIndexStream.Clone();
        numTotalDocs = (int)(indexSize >> 3);
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
        {
            Dispose();
        }
    }
}