/// <summary>
/// Positions <paramref name="data"/> at the address block for this field and
/// returns a monotonic reader over a private clone of the stream, so the
/// returned reader can seek without disturbing the shared input.
/// </summary>
protected override MonotonicBlockPackedReader GetAddressInstance(IndexInput data, FieldInfo field, BinaryEntry bytes)
{
    data.Seek(bytes.AddressesOffset);
    var addresses = (IndexInput)data.Clone();
    return new MonotonicBlockPackedReader(addresses, bytes.PackedIntsVersion, bytes.BlockSize, bytes.Count, true);
}
/// <summary>
/// Positions <paramref name="data"/> at the ord-index block for this field and
/// returns a monotonic reader over a private clone of the stream, so the
/// returned reader can seek without disturbing the shared input.
/// </summary>
protected override MonotonicBlockPackedReader GetOrdIndexInstance(IndexInput data, FieldInfo field, NumericEntry entry)
{
    data.Seek(entry.Offset);
    var ordIndex = (IndexInput)data.Clone();
    return new MonotonicBlockPackedReader(ordIndex, entry.PackedIntsVersion, entry.BlockSize, entry.Count, true);
}
/// <summary>
/// Deep-copies this slice: the wrapped stream is cloned as well, so the copy
/// can be read and positioned independently of this instance.
/// </summary>
public override System.Object Clone(IState state)
{
    CSIndexInput copy = (CSIndexInput)base.Clone(state);
    copy.fileOffset = fileOffset;
    copy.length = length;
    // Give the copy its own underlying stream so seeks do not interfere.
    copy.base_Renamed = (IndexInput)base_Renamed.Clone(state);
    return copy;
}
/// <summary>
/// Clone-only constructor: copies the original reader's already-parsed
/// metadata, shares its cloneable streams, and opens private clones of those
/// streams for this instance's own reads.
/// </summary>
private FieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, int format, int formatSize, int docStoreOffset, IndexInput cloneableFieldsStream, IndexInput cloneableIndexStream, IState state)
{
    // Copy over the metadata computed by the original reader.
    this.fieldInfos = fieldInfos;
    this.format = format;
    this.formatSize = formatSize;
    this.numTotalDocs = numTotalDocs;
    this.size = size;
    this.docStoreOffset = docStoreOffset;

    // Keep references to the shared cloneable streams, then take private
    // clones so this reader's position is independent of the original's.
    this.cloneableFieldsStream = cloneableFieldsStream;
    this.cloneableIndexStream = cloneableIndexStream;
    fieldsStream = (IndexInput)cloneableFieldsStream.Clone(state);
    indexStream = (IndexInput)cloneableIndexStream.Clone(state);
}
/// <summary>
/// Copies this reader; the three term-vector streams are cloned so the copy
/// can be positioned independently.
/// </summary>
public virtual System.Object Clone(IState state)
{
    TermVectorsReader clone = (TermVectorsReader)base.MemberwiseClone();

    // tvx/tvd/tvf are all null when this reader was created on a segment
    // that did not have term vectors saved; clone them only when present.
    bool hasVectors = tvx != null && tvd != null && tvf != null;
    if (hasVectors)
    {
        clone.tvx = (IndexInput)tvx.Clone(state);
        clone.tvd = (IndexInput)tvd.Clone(state);
        clone.tvf = (IndexInput)tvf.Clone(state);
    }
    return clone;
}
/// <summary>
/// Verifies clone safety of MMapDirectory inputs: once the master input is
/// disposed, reads on the master and on every clone (including a clone of a
/// clone) must throw <see cref="AlreadyClosedException"/>, and disposing the
/// master twice is harmless.
/// </summary>
public virtual void TestCloneSafety()
{
    MMapDirectory mmapDir = new MMapDirectory(CreateTempDir("testCloneSafety"));
    IndexOutput io = mmapDir.CreateOutput("bytes", NewIOContext(Random()));
    io.WriteVInt(5);
    io.Dispose();

    IndexInput one = mmapDir.OpenInput("bytes", IOContext.DEFAULT);
    IndexInput two = (IndexInput)one.Clone();
    IndexInput three = (IndexInput)two.Clone(); // clone of clone
    one.Dispose();

    // Fix: the catch clauses declared unused locals ("ignore"), producing
    // CS0168 warnings; catching the type without a variable is equivalent
    // and warning-free (matches the intent of TestCloneSliceSafety's pragmas).
    try
    {
        one.ReadVInt();
        Assert.Fail("Must throw AlreadyClosedException");
    }
    catch (AlreadyClosedException)
    {
        // pass
    }
    try
    {
        two.ReadVInt();
        Assert.Fail("Must throw AlreadyClosedException");
    }
    catch (AlreadyClosedException)
    {
        // pass
    }
    try
    {
        three.ReadVInt();
        Assert.Fail("Must throw AlreadyClosedException");
    }
    catch (AlreadyClosedException)
    {
        // pass
    }

    two.Dispose();
    three.Dispose();
    // test double close of master:
    one.Dispose();
    mmapDir.Dispose();
}
/// <summary>
/// Deep-copies this enum: the input stream, term info, and term buffers are
/// all cloned so the copy can be advanced independently of this instance.
/// </summary>
public System.Object Clone(IState state)
{
    // Fix: the previous version wrapped MemberwiseClone in an empty
    // catch (System.Exception) block. MemberwiseClone does not throw, and if
    // anything had gone wrong the swallowed exception would only have
    // resurfaced as a NullReferenceException on the next line, hiding the
    // real cause. The pointless try/catch is removed.
    SegmentTermEnum clone = (SegmentTermEnum)base.MemberwiseClone();

    clone.input = (IndexInput)input.Clone(state);
    clone.termInfo = new TermInfo(termInfo);
    clone.termBuffer = (TermBuffer)termBuffer.Clone();
    clone.prevBuffer = (TermBuffer)prevBuffer.Clone();
    // The scan buffer is scratch state; a fresh one is sufficient.
    clone.scanBuffer = new TermBuffer();
    return clone;
}
/// <summary>Optimized implementation. </summary>
/// <remarks>
/// Advances to the first document &gt;= <paramref name="target"/>. When the
/// document frequency is at least <c>skipInterval</c>, a multi-level skip
/// list is consulted first to jump close to the target; the remaining
/// distance is then covered by linear scanning via <c>Next</c>.
/// </remarks>
/// <returns><c>false</c> when no document &gt;= <paramref name="target"/> exists; otherwise <c>true</c>.</returns>
public virtual bool SkipTo(int target, IState state)
{
    if (df >= skipInterval)
    {
        // optimized case
        if (skipListReader == null)
        {
            // lazily clone: the skip reader gets its own stream so its seeks
            // do not disturb freqStream's position
            skipListReader = new DefaultSkipListReader((IndexInput)freqStream.Clone(state), maxSkipLevels, skipInterval);
        }

        if (!haveSkipped)
        {
            // lazily initialize skip stream
            skipListReader.Init(skipPointer, freqBasePointer, proxBasePointer, df, currentFieldStoresPayloads);
            haveSkipped = true;
        }

        int newCount = skipListReader.SkipTo(target, state);
        if (newCount > count)
        {
            // The skip list jumped past our current position: reposition the
            // freq/prox streams and adopt the skip entry's doc and count.
            freqStream.Seek(skipListReader.GetFreqPointer(), state);
            SkipProx(skipListReader.GetProxPointer(), skipListReader.GetPayloadLength());

            doc = skipListReader.GetDoc();
            count = newCount;
        }
    }

    // done skipping, now just scan
    do
    {
        if (!Next(state))
        {
            return (false);
        }
    }
    while (target > doc);
    return (true);
}
/// <summary>
/// Opens a slice spanning the whole underlying input, backed by a clone so
/// the returned input is positioned independently.
/// </summary>
public override IndexInput OpenFullSlice()
{
    var fullClone = @base.Clone();
    return (IndexInput)fullClone;
}
/// <summary>
/// Verifies slice safety of MMapDirectory: once the slicer is disposed,
/// reads on every slice and on clones of those slices must throw
/// <see cref="ObjectDisposedException"/>, and disposing the slicer twice is
/// harmless.
/// </summary>
public virtual void TestCloneSliceSafety()
{
    MMapDirectory mmapDir = new MMapDirectory(CreateTempDir("testCloneSliceSafety"));
    IndexOutput io = mmapDir.CreateOutput("bytes", NewIOContext(Random()));
    io.WriteInt32(1);
    io.WriteInt32(2);
    io.Dispose();

    IndexInputSlicer slicer = mmapDir.CreateSlicer("bytes", NewIOContext(Random()));
    IndexInput one = slicer.OpenSlice("first int", 0, 4);
    IndexInput two = slicer.OpenSlice("second int", 4, 4);
    IndexInput three = (IndexInput)one.Clone(); // clone of clone
    IndexInput four = (IndexInput)two.Clone(); // clone of clone
    slicer.Dispose();

    // Catching the exception type without naming a local keeps behavior
    // identical while avoiding the CS0168 pragma dance.
    try
    {
        one.ReadInt32();
        Assert.Fail("Must throw ObjectDisposedException");
    }
    catch (ObjectDisposedException)
    {
        // pass
    }
    try
    {
        two.ReadInt32();
        Assert.Fail("Must throw ObjectDisposedException");
    }
    catch (ObjectDisposedException)
    {
        // pass
    }
    try
    {
        three.ReadInt32();
        Assert.Fail("Must throw ObjectDisposedException");
    }
    catch (ObjectDisposedException)
    {
        // pass
    }
    try
    {
        four.ReadInt32();
        Assert.Fail("Must throw ObjectDisposedException");
    }
    catch (ObjectDisposedException)
    {
        // pass
    }

    one.Dispose();
    two.Dispose();
    three.Dispose();
    four.Dispose();

    // test double-close of slicer:
    slicer.Dispose();
    mmapDir.Dispose();
}
/// <summary>
/// Creates a buffered view over a clone of <paramref name="base"/>,
/// restricted to the window starting at <paramref name="fileOffset"/> and
/// spanning <paramref name="length"/> bytes.
/// </summary>
internal CSIndexInput(IndexInput @base, long fileOffset, long length, int readBufferSize, IState state)
    : base(readBufferSize)
{
    this.fileOffset = fileOffset;
    this.length = length;
    // Clone so this slice's position is independent of the shared stream.
    this.base_Renamed = (IndexInput)@base.Clone(state);
}
/// <summary>
/// Clones the wrapped input into a new fault-injecting wrapper.
/// </summary>
public override System.Object Clone(IState state)
{
    // Fix: the state argument was being dropped (Clone(null)) even though it
    // is available here; forward it so the delegate is cloned under the same
    // state as every other Clone(IState) implementation in this codebase.
    IndexInput inner = (IndexInput)delegate_Renamed.Clone(state);
    return new FaultyIndexInput(inner);
}
/// <summary>
/// Clones the provided input, reads all bytes from the file, and calls <seealso cref="#checkFooter"/>.
/// <p>
/// Note that this method may be slow, as it must process the entire file.
/// If you just need to extract the checksum value, call <seealso cref="#retrieveChecksum"/>.
/// </summary>
public static long ChecksumEntireFile(IndexInput input)
{
    // Work on a clone so the caller's stream position is untouched.
    IndexInput reader = (IndexInput)input.Clone();
    reader.Seek(0);

    ChecksumIndexInput checksummed = new BufferedChecksumIndexInput(reader);
    Debug.Assert(checksummed.FilePointer == 0);

    // Seeking a checksumming input streams every byte up to the footer
    // through the checksum; CheckFooter then validates it.
    checksummed.Seek(checksummed.Length() - FooterLength());
    return CheckFooter(checksummed);
}
/// <summary>
/// Creates a buffered slice of <paramref name="base"/> covering
/// [<paramref name="fileOffset"/>, fileOffset + length), backed by a clone so
/// the slice is positioned independently of the shared stream.
/// </summary>
internal SlicedIndexInput(string sliceDescription, IndexInput @base, long fileOffset, long length, int readBufferSize)
    : base("SlicedIndexInput(" + sliceDescription + " in " + @base + " slice=" + fileOffset + ":" + (fileOffset + length) + ")", readBufferSize)
{
    this.FileOffset = fileOffset;
    this.Length_Renamed = length;
    this.@base = (IndexInput)@base.Clone();
}
/// <summary>
/// Opens the stored-fields data (.fdt) and index (.fdx) streams for
/// <paramref name="segment"/>, detects the on-disk format version, and
/// computes the document range this reader covers (optionally a slice of a
/// shared doc store). On any failure during construction, already-opened
/// streams are closed via <c>Dispose()</c> in the finally block.
/// </summary>
internal FieldsReader(Directory d, System.String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size, IState state)
{
    bool success = false;
    isOriginal = true;
    try
    {
        fieldInfos = fn;

        cloneableFieldsStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize, state);
        cloneableIndexStream = d.OpenInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize, state);

        // First version of fdx did not include a format
        // header, but, the first int will always be 0 in that
        // case
        int firstInt = cloneableIndexStream.ReadInt(state);
        format = firstInt == 0 ? 0 : firstInt;

        if (format > FieldsWriter.FORMAT_CURRENT)
        {
            throw new CorruptIndexException("Incompatible format version: " + format + " expected " + FieldsWriter.FORMAT_CURRENT + " or lower");
        }

        // Only formats newer than FieldsWriter.FORMAT carry the 4-byte header.
        formatSize = format > FieldsWriter.FORMAT ? 4 : 0;

        if (format < FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
        {
            // Pre-UTF8 indexes store strings in modified UTF-8.
            cloneableFieldsStream.SetModifiedUTF8StringsMode();
        }

        fieldsStream = (IndexInput)cloneableFieldsStream.Clone(state);

        // Each index entry is 8 bytes; indexSize is the payload past the header.
        long indexSize = cloneableIndexStream.Length(state) - formatSize;

        if (docStoreOffset != -1)
        {
            // We read only a slice out of this shared fields file
            this.docStoreOffset = docStoreOffset;
            this.size = size;

            // Verify the file is long enough to hold all of our
            // docs
            System.Diagnostics.Debug.Assert(((int)(indexSize / 8)) >= size + this.docStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
        }
        else
        {
            this.docStoreOffset = 0;
            // size = number of 8-byte index entries.
            this.size = (int)(indexSize >> 3);
        }

        indexStream = (IndexInput)cloneableIndexStream.Clone(state);

        numTotalDocs = (int)(indexSize >> 3);
        success = true;
    }
    finally
    {
        // With lock-less commits, it's entirely possible (and
        // fine) to hit a FileNotFound exception above. In
        // this case, we want to explicitly close any subset
        // of things that were opened so that we don't have to
        // wait for a GC to do so.
        if (!success)
        {
            Dispose();
        }
    }
}