/// <summary>Aborts an in-flight two-phase commit started by PrepareCommit:
/// closes the pending segments output, deletes the partially written
/// segments_N file, and clears the pending state. All exceptions are
/// suppressed so the caller can keep throwing its original exception.
/// </summary>
internal void RollbackCommit(Directory dir)
{
    if (pendingSegnOutput != null)
    {
        try
        {
            pendingSegnOutput.Close();
        }
        catch (System.Exception t)
        {
            // Suppress so we keep throwing the original exception
            // in our caller
        }

        // Must carefully compute fileName from "generation"
        // since lastGeneration isn't incremented:
        try
        {
            System.String segmentFileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
            dir.DeleteFile(segmentFileName);
        }
        catch (System.Exception t)
        {
            // Suppress so we keep throwing the original exception
            // in our caller
        }

        // Mark that no commit is pending any more.
        pendingSegnOutput = null;
    }
}
/// <summary>Used for debugging. Renders this segment as
/// "name:Cfs[x]docCount[->docStoreSegment]" where the compound-file flag
/// is 'c' (compound), 'C' (not compound) or '?' (check failed), and "x"
/// marks a segment that lives in a different directory than
/// <code>dir</code>.
/// </summary>
public System.String SegString(Directory dir)
{
    System.String cfs;
    try
    {
        cfs = GetUseCompoundFile() ? "c" : "C";
    }
    catch (System.IO.IOException)
    {
        // Could not determine compound-file state; show it as unknown.
        cfs = "?";
    }

    System.String docStore = (docStoreOffset != -1) ? "->" + docStoreSegment : "";

    return name + ":" + cfs + (this.dir == dir ? "" : "x") + docCount + docStore;
}
/// <summary>This ctor used only by test code.
/// </summary>
/// <param name="dir">The Directory to merge the other segments into</param>
/// <param name="name">The name of the new segment</param>
public /*internal*/ SegmentMerger(Directory dir, System.String name)
{
    InitBlock();
    directory = dir;
    segment = name;
    // No OneMerge in the test path, so a null-backed check-abort is
    // installed — presumably it never aborts; confirm against
    // AnonymousClassCheckAbort.
    checkAbort = new AnonymousClassCheckAbort(this, null, null);
}
/// <summary>Call this to start a commit. This writes the new
/// segments file, but writes an invalid checksum at the
/// end, so that it is not visible to readers. Once this
/// is called you must call {@link #finishCommit} to complete
/// the commit or {@link #rollbackCommit} to abort it.
/// </summary>
internal void PrepareCommit(Directory dir)
{
    // A non-null pending output means a previous PrepareCommit was
    // never finished or rolled back.
    if (pendingSegnOutput != null)
    {
        throw new System.SystemException("prepareCommit was already called");
    }
    Write(dir);
}
/// <summary> Open an index with write access.
/// </summary>
/// <param name="dirName">the index directory</param>
/// <param name="analyzer">the analyzer to use for adding new documents</param>
/// <param name="create"><code>true</code> to create the index or overwrite the existing one;
/// <code>false</code> to append to the existing index</param>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> LockObtainFailedException if another writer
/// has this index open (<code>write.lock</code> could not be obtained) </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public IndexModifier(System.String dirName, Analyzer analyzer, bool create)
{
    InitBlock();
    Directory dir = FSDirectory.GetDirectory(dirName);
    // We opened this Directory ourselves; closeDir presumably flags it
    // for cleanup when the modifier is closed — confirm against Close().
    this.closeDir = true;
    Init(dir, analyzer, create);
}
/// <summary> Open an index with write access.
/// </summary>
/// <param name="file">the index directory</param>
/// <param name="analyzer">the analyzer to use for adding new documents</param>
/// <param name="create"><code>true</code> to create the index or overwrite the existing one;
/// <code>false</code> to append to the existing index</param>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> LockObtainFailedException if another writer
/// has this index open (<code>write.lock</code> could not be obtained) </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public IndexModifier(System.IO.FileInfo file, Analyzer analyzer, bool create)
{
    InitBlock();
    Directory dir = FSDirectory.GetDirectory(file);
    // We opened this Directory ourselves; closeDir presumably flags it
    // for cleanup when the modifier is closed — confirm against Close().
    this.closeDir = true;
    Init(dir, analyzer, create);
}
/// <summary>Throws MergeAbortedException if this merge has been aborted;
/// otherwise returns normally. Synchronized on this instance.
/// </summary>
internal virtual void CheckAborted(Directory dir)
{
    lock (this)
    {
        if (aborted)
        {
            throw new MergeAbortedException("merge is aborted: " + SegString(dir));
        }
    }
}
/// <summary>Renders this merge specification for debugging: a header line
/// followed by one numbered line per pending merge.
/// </summary>
public virtual System.String SegString(Directory dir)
{
    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    sb.Append("MergeSpec:\n");
    int numMerges = merges.Count;
    for (int i = 0; i < numMerges; i++)
    {
        sb.Append(" ").Append(i + 1).Append(": ").Append(((OneMerge) merges[i]).SegString(dir));
    }
    return sb.ToString();
}
/// <summary> Initialize an IndexWriter.</summary>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> LockObtainFailedException if another writer
/// has this index open (<code>write.lock</code> could not be obtained) </throws>
/// <throws> IOException if there is a low-level IO error </throws>
protected internal virtual void Init(Directory directory, Analyzer analyzer, bool create)
{
    this.directory = directory;
    // NOTE(review): this locks on the Directory instance itself, which is
    // reachable by outside code; a private lock object would avoid
    // accidental contention — confirm other code synchronizes on the
    // same Directory before changing.
    lock (this.directory)
    {
        this.analyzer = analyzer;
        indexWriter = new IndexWriter(directory, analyzer, create, IndexWriter.MaxFieldLength.LIMITED);
        open = true;
    }
}
/// <summary> Get the generation (N) of the current segments_N file
/// in the directory.
/// </summary>
/// <param name="directory">-- directory to search for the latest segments_N file</param>
public static long GetCurrentSegmentGeneration(Directory directory)
{
    try
    {
        return GetCurrentSegmentGeneration(directory.ListAll());
    }
    catch (NoSuchDirectoryException)
    {
        // A directory that does not exist has no segments file yet.
        return -1;
    }
}
/// <summary>Holds the per-flush state shared by the consumers that write a
/// single segment: target directory, segment/doc-store names, doc counts,
/// term index interval, and the set of files flushed so far.
/// </summary>
public SegmentWriteState(DocumentsWriter docWriter, Directory directory, System.String segmentName, System.String docStoreSegmentName, int numDocs, int numDocsInStore, int termIndexInterval)
{
    this.docWriter = docWriter;
    this.directory = directory;
    this.segmentName = segmentName;
    this.docStoreSegmentName = docStoreSegmentName;
    this.numDocs = numDocs;
    this.numDocsInStore = numDocsInStore;
    this.termIndexInterval = termIndexInterval;
    // Consumers register each file they write here (see
    // FormatPostingsFieldsWriter's AddIfNotContains calls).
    flushedFiles = new System.Collections.Hashtable();
}
/// <summary>Full ctor: delegates basic fields to the 3-arg ctor, then sets
/// the explicit (post-lockless) compound-file flag, norm-file layout and
/// shared doc-store fields.
/// </summary>
public SegmentInfo(System.String name, int docCount, Directory dir, bool isCompoundFile, bool hasSingleNormFile, int docStoreOffset, System.String docStoreSegment, bool docStoreIsCompoundFile, bool hasProx) : this(name, docCount, dir)
{
    // Explicit YES/NO overrides the 3-arg ctor's CHECK_DIR sentinel.
    this.isCompoundFile = (sbyte)(isCompoundFile?YES:NO);
    this.hasSingleNormFile = hasSingleNormFile;
    preLockless = false;
    this.docStoreOffset = docStoreOffset;
    this.docStoreSegment = docStoreSegment;
    this.docStoreIsCompoundFile = docStoreIsCompoundFile;
    this.hasProx = hasProx;
    delCount = 0;
    // A segment using a shared doc store (offset != -1) must name it.
    System.Diagnostics.Debug.Assert(docStoreOffset == -1 || docStoreSegment != null, "dso=" + docStoreOffset + " dss=" + docStoreSegment + " docCount=" + docCount);
}
/// <summary>Creates a writer for the compound file <code>name</code> in
/// <code>dir</code>, using <code>checkAbort</code> (may be a no-op) to
/// poll for merge aborts.
/// </summary>
/// <param name="dir">directory the compound file will be written to; must not be null</param>
/// <param name="name">name of the compound file; must not be null</param>
internal CompoundFileWriter(Directory dir, System.String name, SegmentMerger.CheckAbort checkAbort)
{
    // Use ArgumentNullException for null arguments: NullReferenceException
    // is reserved for actual null dereferences by the runtime and should
    // never be thrown explicitly.
    if (dir == null)
        throw new System.ArgumentNullException("dir", "directory cannot be null");
    if (name == null)
        throw new System.ArgumentNullException("name", "name cannot be null");
    this.checkAbort = checkAbort;
    directory = dir;
    fileName = name;
    ids = new System.Collections.Hashtable();
    entries = new System.Collections.ArrayList();
}
/// <summary>Opens the three term-vector outputs (index .tvx, documents
/// .tvd, fields .tvf) for <code>segment</code> and stamps each with the
/// current format version.
/// </summary>
public TermVectorsWriter(Directory directory, System.String segment, FieldInfos fieldInfos)
{
    // Open files for TermVector storage
    // NOTE(review): if a later CreateOutput/WriteInt throws, the outputs
    // already opened above it are leaked (no try/finally cleanup here) —
    // confirm callers handle this.
    tvx = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
    tvx.WriteInt(TermVectorsReader.FORMAT_CURRENT);
    tvd = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
    tvd.WriteInt(TermVectorsReader.FORMAT_CURRENT);
    tvf = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
    tvf.WriteInt(TermVectorsReader.FORMAT_CURRENT);
    this.fieldInfos = fieldInfos;
}
/// <summary>Runs CheckIndex over <code>onlySegments</code> (or all
/// segments when null) of <code>dir</code>, optionally attempting a fix,
/// and returns whether the index was clean BEFORE any fix was applied.
/// </summary>
public static bool Check(Directory dir, bool doFix, System.Collections.IList onlySegments)
{
    CheckIndex checker = new CheckIndex(dir);
    Status result = checker.CheckIndex_Renamed_Method(onlySegments);

    if (doFix && !result.clean)
    {
        checker.FixIndex(result);
    }

    // Note: reports the pre-fix status, even if FixIndex ran.
    return result.clean;
}
/// <summary>Opens compound file <code>name</code> and reads its table of
/// contents: a count followed by (offset, id) pairs. Each entry's length
/// is derived from the NEXT entry's offset; the last entry runs to the
/// end of the stream. On any failure the stream is closed before the
/// exception propagates.
/// </summary>
public CompoundFileReader(Directory dir, System.String name, int readBufferSize)
{
    directory = dir;
    fileName = name;
    this.readBufferSize = readBufferSize;

    bool success = false;

    try
    {
        stream = dir.OpenInput(name, readBufferSize);

        // read the directory and init files
        int count = stream.ReadVInt();
        FileEntry entry = null;
        for (int i = 0; i < count; i++)
        {
            long offset = stream.ReadLong();
            System.String id = stream.ReadString();

            if (entry != null)
            {
                // set length of the previous entry
                entry.length = offset - entry.offset;
            }

            entry = new FileEntry();
            entry.offset = offset;
            entries[id] = entry;
        }

        // set the length of the final entry
        if (entry != null)
        {
            entry.length = stream.Length() - entry.offset;
        }

        success = true;
    }
    finally
    {
        if (!success && (stream != null))
        {
            try
            {
                stream.Close();
            }
            catch (System.IO.IOException e)
            {
                // Suppress: we are already propagating the original failure.
            }
        }
    }
}
/// <summary>Writes this object to file <code>name</code> in Directory
/// <code>d</code>, always closing the output afterwards.
/// </summary>
public void Write(Directory d, System.String name)
{
    IndexOutput @out = d.CreateOutput(name);
    try
    {
        Write(@out);
    }
    finally
    {
        // Release the file handle even when the write failed.
        @out.Close();
    }
}
/// <summary>Opens the terms output for <code>segment</code> — ".tii" when
/// this writer is the term index, ".tis" for the term dictionary — and
/// writes the fixed header fields.
/// </summary>
private void Initialize(Directory directory, System.String segment, FieldInfos fis, int interval, bool isi)
{
    indexInterval = interval;
    fieldInfos = fis;
    isIndex = isi;
    output = directory.CreateOutput(segment + (isIndex?".tii":".tis"));
    output.WriteInt(FORMAT_CURRENT); // write format
    output.WriteLong(0); // leave space for size
    output.WriteInt(indexInterval); // write indexInterval
    output.WriteInt(skipInterval); // write skipInterval
    output.WriteInt(maxSkipLevels); // write maxSkipLevels
    // Assert-only setup: runs solely in debug builds.
    System.Diagnostics.Debug.Assert(InitUTF16Results());
}
// Used only for testing
/// <summary>Returns true when at least one segment lives in a directory
/// other than <code>dir</code> (an "external" segment).
/// </summary>
public bool HasExternalSegments(Directory dir)
{
    int numSegments = Count;
    int i = 0;
    while (i < numSegments)
    {
        if (Info(i).dir != dir)
        {
            return true;
        }
        i++;
    }
    return false;
}
/// <summary> Current version number from segments file.</summary>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public static long ReadCurrentVersion(Directory directory)
{
    // Fully read the segments file: this ensures that it's
    // completely written so that if
    // IndexWriter.prepareCommit has been called (but not
    // yet commit), then the reader will still see itself as
    // current:
    SegmentInfos sis = new SegmentInfos();
    sis.Read(directory);
    return sis.version;
}
/// <summary>Production ctor: merges into <code>writer</code>'s directory.
/// When a OneMerge is supplied, aborts are honored via CheckAbort;
/// otherwise a null-backed checker is installed.
/// </summary>
internal SegmentMerger(IndexWriter writer, System.String name, MergePolicy.OneMerge merge)
{
    InitBlock();
    directory = writer.GetDirectory();
    segment = name;
    if (merge != null)
    {
        checkAbort = new CheckAbort(merge, directory);
    }
    else
    {
        // No merge to watch — presumably this variant never aborts;
        // confirm against AnonymousClassCheckAbort1.
        checkAbort = new AnonymousClassCheckAbort1(this, null, null);
    }
    termIndexInterval = writer.GetTermIndexInterval();
}
/// <summary>Basic ctor: pre-lockless defaults — deletion gen NO,
/// compound-file state left as CHECK_DIR (resolved later by probing the
/// directory), no shared doc store (offset -1).
/// </summary>
public SegmentInfo(System.String name, int docCount, Directory dir)
{
    this.name = name;
    this.docCount = docCount;
    this.dir = dir;
    delGen = NO;
    // CHECK_DIR sentinel: compound-file state not yet known.
    isCompoundFile = (sbyte)(CHECK_DIR);
    preLockless = true;
    hasSingleNormFile = false;
    docStoreOffset = -1;
    docStoreSegment = name;
    docStoreIsCompoundFile = false;
    delCount = 0;
    hasProx = true;
}
/// <summary>Snapshots a commit: records the segments file name, version,
/// generation, user data and the full file set of <code>segmentInfos</code>
/// at construction time, so the deletion policy can inspect/delete it later.
/// </summary>
public CommitPoint(IndexFileDeleter enclosingInstance, System.Collections.ICollection commitsToDelete, Directory directory, SegmentInfos segmentInfos)
{
    InitBlock(enclosingInstance);
    this.directory = directory;
    this.commitsToDelete = commitsToDelete;
    userData = segmentInfos.GetUserData();
    segmentsFileName = segmentInfos.GetCurrentSegmentFileName();
    version = segmentInfos.GetVersion();
    generation = segmentInfos.GetGeneration();
    files = segmentInfos.Files(directory, true);
    // NOTE(review): gen duplicates generation (same GetGeneration() call)
    // — presumably both fields are kept for different consumers; confirm.
    gen = segmentInfos.GetGeneration();
    isOptimized = segmentInfos.Count == 1 && !segmentInfos.Info(0).HasDeletions();

    System.Diagnostics.Debug.Assert(!segmentInfos.HasExternalSegments(directory));
}
/// <summary>Creates a writer for the compound file <code>name</code> in
/// <code>dir</code>, using <code>checkAbort</code> (may be a no-op) to
/// poll for merge aborts.
/// </summary>
/// <param name="dir">directory the compound file will be written to; must not be null</param>
/// <param name="name">name of the compound file; must not be null</param>
internal CompoundFileWriter(Directory dir, System.String name, SegmentMerger.CheckAbort checkAbort)
{
    // Use ArgumentNullException for null arguments: NullReferenceException
    // is reserved for actual null dereferences by the runtime and should
    // never be thrown explicitly.
    if (dir == null)
    {
        throw new System.ArgumentNullException("dir", "directory cannot be null");
    }
    if (name == null)
    {
        throw new System.ArgumentNullException("name", "name cannot be null");
    }
    this.checkAbort = checkAbort;
    directory = dir;
    fileName = name;
    ids = new System.Collections.Hashtable();
    entries = new System.Collections.ArrayList();
}
/// <summary> Construct a FieldInfos object using the directory and the name of the file
/// IndexInput
/// </summary>
/// <param name="d">The directory to open the IndexInput from</param>
/// <param name="name">The name of the file to open the IndexInput from in the Directory</param>
/// <throws> IOException </throws>
public /*internal*/ FieldInfos(Directory d, System.String name)
{
    IndexInput input = d.OpenInput(name);
    try
    {
        try
        {
            Read(input, name);
        }
        catch (System.IO.IOException ioe)
        {
            if (format == FORMAT_PRE)
            {
                // LUCENE-1623: FORMAT_PRE (before there was a
                // format) may be 2.3.2 (pre-utf8) or 2.4.x (utf8)
                // encoding; retry with input set to pre-utf8
                input.Seek(0);
                input.SetModifiedUTF8StringsMode();
                // Drop anything the failed first pass partially loaded.
                byNumber.Clear();
                byName.Clear();
                try
                {
                    Read(input, name);
                }
                catch (System.Exception t)
                {
                    // Ignore any new exception & throw original IOE
                    throw ioe;
                }
            }
            else
            {
                // The IOException cannot be caused by
                // LUCENE-1623, so re-throw it
                throw ioe;
            }
        }
    }
    finally
    {
        input.Close();
    }
}
/// <summary>Sets up postings writing for one segment: creates the terms
/// dictionary/index writer and the shared skip-list writer, registers the
/// terms files as flushed, and builds the per-term consumer chain.
/// </summary>
public FormatPostingsFieldsWriter(SegmentWriteState state, FieldInfos fieldInfos) : base()
{
    dir = state.directory;
    segment = state.segmentName;
    totalNumDocs = state.numDocs;
    this.fieldInfos = fieldInfos;
    termsOut = new TermInfosWriter(dir, segment, fieldInfos, state.termIndexInterval);

    // TODO: this is a nasty abstraction violation (that we
    // peek down to find freqOut/proxOut) -- we need a
    // better abstraction here whereby these child consumers
    // can provide skip data or not
    skipListWriter = new DefaultSkipListWriter(termsOut.skipInterval, termsOut.maxSkipLevels, totalNumDocs, null, null);

    SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, state.SegmentFileName(IndexFileNames.TERMS_EXTENSION));
    SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, state.SegmentFileName(IndexFileNames.TERMS_INDEX_EXTENSION));

    termsWriter = new FormatPostingsTermsWriter(state, this);
}
/// <summary>Sets up postings writing for one segment: creates the terms
/// dictionary/index writer and the shared skip-list writer, registers the
/// terms files as flushed, and builds the per-term consumer chain.
/// </summary>
public FormatPostingsFieldsWriter(SegmentWriteState state, FieldInfos fieldInfos):base()
{
    dir = state.directory;
    segment = state.segmentName;
    totalNumDocs = state.numDocs;
    this.fieldInfos = fieldInfos;
    termsOut = new TermInfosWriter(dir, segment, fieldInfos, state.termIndexInterval);

    // TODO: this is a nasty abstraction violation (that we
    // peek down to find freqOut/proxOut) -- we need a
    // better abstraction here whereby these child consumers
    // can provide skip data or not
    skipListWriter = new DefaultSkipListWriter(termsOut.skipInterval, termsOut.maxSkipLevels, totalNumDocs, null, null);

    SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, state.SegmentFileName(IndexFileNames.TERMS_EXTENSION));
    SupportClass.CollectionsHelper.AddIfNotContains(state.flushedFiles, state.SegmentFileName(IndexFileNames.TERMS_INDEX_EXTENSION));

    termsWriter = new FormatPostingsTermsWriter(state, this);
}
/// <summary>Returns all file names referenced by SegmentInfo
/// instances matching the provided Directory (ie files
/// associated with any "external" segments are skipped).
/// The returned collection is recomputed on each
/// invocation.
/// </summary>
public System.Collections.Generic.ICollection<string> Files(Directory dir, bool includeSegmentsFile)
{
    System.Collections.Generic.Dictionary<string, string> fileSet = new System.Collections.Generic.Dictionary<string, string>();

    if (includeSegmentsFile)
    {
        string segmentsFile = GetCurrentSegmentFileName();
        fileSet.Add(segmentsFile, segmentsFile);
    }

    int numSegments = Count;
    for (int i = 0; i < numSegments; i++)
    {
        SegmentInfo info = Info(i);
        if (info.dir == dir)
        {
            // Only collect files for segments that live in dir.
            SupportClass.CollectionsHelper.AddAllIfNotContains(fileSet, info.Files());
        }
    }

    return fileSet.Keys;
}
/// <summary>Writes this vector to the file <code>name</code> in Directory
/// <code>d</code>, in a format that can be read by the constructor {@link
/// #BitVector(Directory, String)}. A sparse vector is stored as d-gaps;
/// a dense one as raw bits. The output is always closed.
/// </summary>
public void Write(Directory d, System.String name)
{
    IndexOutput @out = d.CreateOutput(name);
    try
    {
        if (IsSparse())
        {
            // sparse bit-set more efficiently saved as d-gaps.
            WriteDgaps(@out);
        }
        else
        {
            WriteBits(@out);
        }
    }
    finally
    {
        @out.Close();
    }
}
/// <summary>Renders this merge for debugging: the space-separated source
/// segments, the target segment name (if assigned), and any
/// [optimize]/[mergeDocStores] markers.
/// </summary>
internal virtual System.String SegString(Directory dir)
{
    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    int numSegments = segments.Count;
    for (int i = 0; i < numSegments; i++)
    {
        if (i > 0)
        {
            sb.Append(' ');
        }
        sb.Append(segments.Info(i).SegString(dir));
    }
    if (info != null)
    {
        sb.Append(" into ").Append(info.name);
    }
    if (optimize)
    {
        sb.Append(" [optimize]");
    }
    if (mergeDocStores)
    {
        sb.Append(" [mergeDocStores]");
    }
    return sb.ToString();
}
/// <summary>Constructs a bit vector from the file <code>name</code> in Directory
/// <code>d</code>, as written by the {@link #write} method. A stored size
/// of -1 marks the d-gap (sparse) encoding; otherwise raw bits follow.
/// The input is always closed.
/// </summary>
public BitVector(Directory d, System.String name)
{
    IndexInput input = d.OpenInput(name);
    try
    {
        // read size
        size = input.ReadInt();
        if (size == -1)
        {
            ReadDgaps(input);
        }
        else
        {
            ReadBits(input);
        }
    }
    finally
    {
        input.Close();
    }
}
/// <summary>Renders all segments for debugging, space-separated; a
/// segment living outside <code>directory</code> is suffixed with "**".
/// Synchronized on this instance.
/// </summary>
public System.String SegString(Directory directory)
{
    lock (this)
    {
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        int numSegments = Count;
        for (int i = 0; i < numSegments; i++)
        {
            if (i > 0)
            {
                sb.Append(' ');
            }
            SegmentInfo info = Info(i);
            sb.Append(info.SegString(directory));
            if (info.dir != directory)
            {
                // Mark external segments.
                sb.Append("**");
            }
        }
        return sb.ToString();
    }
}
/// <summary>Used for debugging. Produces
/// "name:Cfs[x]docCount[->docStoreSegment]": 'c'/'C'/'?' for the
/// compound-file state, "x" when this segment is in a foreign directory.
/// </summary>
public System.String SegString(Directory dir)
{
    System.String cfs;
    try
    {
        cfs = GetUseCompoundFile() ? "c" : "C";
    }
    catch (System.IO.IOException)
    {
        // Compound-file state could not be determined.
        cfs = "?";
    }

    System.String docStore = docStoreOffset != -1 ? "->" + docStoreSegment : "";

    return name + ":" + cfs + (this.dir == dir ? "" : "x") + docCount + docStore;
}
/// <summary> Construct a new SegmentInfo instance by reading a
/// previously saved SegmentInfo from input.
/// </summary>
/// <param name="dir">directory to load from</param>
/// <param name="format">format of the segments info file</param>
/// <param name="input">input handle to read segment info from</param>
internal SegmentInfo(Directory dir, int format, IndexInput input)
{
    this.dir = dir;
    name = input.ReadString();
    docCount = input.ReadInt();
    // Format constants are negative and DECREASE with newer versions,
    // so "format <= X" means "at least as new as X".
    if (format <= SegmentInfos.FORMAT_LOCKLESS)
    {
        delGen = input.ReadLong();
        if (format <= SegmentInfos.FORMAT_SHARED_DOC_STORE)
        {
            docStoreOffset = input.ReadInt();
            if (docStoreOffset != - 1)
            {
                docStoreSegment = input.ReadString();
                docStoreIsCompoundFile = (1 == input.ReadByte());
            }
            else
            {
                docStoreSegment = name;
                docStoreIsCompoundFile = false;
            }
        }
        else
        {
            docStoreOffset = - 1;
            docStoreSegment = name;
            docStoreIsCompoundFile = false;
        }
        if (format <= SegmentInfos.FORMAT_SINGLE_NORM_FILE)
        {
            hasSingleNormFile = (1 == input.ReadByte());
        }
        else
        {
            hasSingleNormFile = false;
        }
        int numNormGen = input.ReadInt();
        if (numNormGen == NO)
        {
            normGen = null;
        }
        else
        {
            normGen = new long[numNormGen];
            for (int j = 0; j < numNormGen; j++)
            {
                normGen[j] = input.ReadLong();
            }
        }
        isCompoundFile = (sbyte) input.ReadByte();
        // CHECK_DIR on disk marks a segment written pre-lockless.
        preLockless = (isCompoundFile == CHECK_DIR);
        if (format <= SegmentInfos.FORMAT_DEL_COUNT)
        {
            delCount = input.ReadInt();
            System.Diagnostics.Debug.Assert(delCount <= docCount);
        }
        else
            delCount = - 1;
        if (format <= SegmentInfos.FORMAT_HAS_PROX)
            hasProx = input.ReadByte() == 1;
        else
            hasProx = true;
        if (format <= SegmentInfos.FORMAT_DIAGNOSTICS)
        {
            diagnostics = input.ReadStringStringMap();
        }
        else
        {
            diagnostics = new System.Collections.Generic.Dictionary<string,string>();
        }
    }
    else
    {
        // Pre-lockless format: everything beyond name/docCount must be
        // discovered later by probing the directory (CHECK_DIR sentinels).
        delGen = CHECK_DIR;
        normGen = null;
        isCompoundFile = (sbyte) (CHECK_DIR);
        preLockless = true;
        hasSingleNormFile = false;
        docStoreOffset = - 1;
        docStoreIsCompoundFile = false;
        docStoreSegment = null;
        delCount = - 1;
        hasProx = true;
        diagnostics = new System.Collections.Generic.Dictionary<string,string>();
    }
}
/// <summary> Copy everything from src SegmentInfo into our instance.</summary>
internal void Reset(SegmentInfo src)
{
    // Invalidate any cached file list before mutating state.
    ClearFiles();
    name = src.name;
    docCount = src.docCount;
    dir = src.dir;
    preLockless = src.preLockless;
    delGen = src.delGen;
    docStoreOffset = src.docStoreOffset;
    docStoreIsCompoundFile = src.docStoreIsCompoundFile;
    // NOTE(review): docStoreSegment, hasProx and diagnostics are NOT
    // copied here despite the "everything" summary — confirm this is
    // intentional.
    if (src.normGen == null)
    {
        normGen = null;
    }
    else
    {
        // Deep-copy so later edits to src.normGen don't alias ours.
        normGen = new long[src.normGen.Length];
        Array.Copy(src.normGen, 0, normGen, 0, src.normGen.Length);
    }
    isCompoundFile = src.isCompoundFile;
    hasSingleNormFile = src.hasSingleNormFile;
    delCount = src.delCount;
}
/// <summary> This version of read uses the retry logic (for lock-less
/// commits) to find the right segments file to load.
/// </summary>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public void Read(Directory directory)
{
    // Clear both generations up front so a failed read leaves a
    // well-defined state.
    generation = lastGeneration = -1;

    new AnonymousClassFindSegmentsFile(this, directory).Run();
}
/// <summary>Debug rendering of all segments, space-separated; segments in
/// a foreign directory get a "**" suffix. Synchronized on this instance.
/// </summary>
public System.String SegString(Directory directory)
{
    lock (this)
    {
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        int numSegments = Count;
        for (int i = 0; i < numSegments; i++)
        {
            if (i > 0)
            {
                sb.Append(' ');
            }
            SegmentInfo segInfo = Info(i);
            sb.Append(segInfo.SegString(directory));
            if (segInfo.dir != directory)
            {
                sb.Append("**");
            }
        }
        return sb.ToString();
    }
}
// Used only for testing
/// <summary>True when any segment lives in a directory other than
/// <code>dir</code>.</summary>
public bool HasExternalSegments(Directory dir)
{
    int numSegments = Count;
    for (int i = 0; i < numSegments; i++)
    {
        if (Info(i).dir != dir)
        {
            return true;
        }
    }
    return false;
}
/// <summary>Call this to start a commit. This writes the new
/// segments file, but writes an invalid checksum at the
/// end, so that it is not visible to readers. Once this
/// is called you must call {@link #finishCommit} to complete
/// the commit or {@link #rollbackCommit} to abort it.
/// </summary>
internal void PrepareCommit(Directory dir)
{
    // Refuse to start a second commit while one is still pending.
    if (pendingSegnOutput != null)
    {
        throw new System.SystemException("prepareCommit was already called");
    }

    Write(dir);
}
/// <summary>Captures the directory this retry helper will scan for the
/// current segments_N file.</summary>
public FindSegmentsFile(Directory directory)
{
    this.directory = directory;
}
/// <summary> Returns userData from latest segments file</summary>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public static System.Collections.Generic.IDictionary<string, string> ReadCurrentUserData(Directory directory)
{
    SegmentInfos infos = new SegmentInfos();
    infos.Read(directory);
    return infos.GetUserData();
}
/// <summary> Current version number from segments file.</summary>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public static long ReadCurrentVersion(Directory directory)
{
    // Fully read the segments file: this ensures that it's
    // completely written so that if
    // IndexWriter.prepareCommit has been called (but not
    // yet commit), then the reader will still see itself as
    // current:
    SegmentInfos sis = new SegmentInfos();
    sis.Read(directory);
    return sis.version;
}
/// <summary>Phase one of a two-phase commit: advances the generation,
/// writes the new segments_N file (header, version, counter, per-segment
/// infos, user data) and leaves it with an unfinished checksum via
/// PrepareCommit so readers cannot see it yet. On failure the partial
/// file is closed and deleted, suppressing secondary exceptions.
/// </summary>
private void Write(Directory directory)
{
    System.String segmentFileName = GetNextSegmentFileName();

    // Always advance the generation on write:
    if (generation == - 1)
    {
        generation = 1;
    }
    else
    {
        generation++;
    }

    ChecksumIndexOutput segnOutput = new ChecksumIndexOutput(directory.CreateOutput(segmentFileName));

    bool success = false;

    try
    {
        segnOutput.WriteInt(CURRENT_FORMAT); // write FORMAT
        segnOutput.WriteLong(++version); // every write changes
        // the index
        segnOutput.WriteInt(counter); // write counter
        segnOutput.WriteInt(Count); // write infos
        for (int i = 0; i < Count; i++)
        {
            Info(i).Write(segnOutput);
        }
        segnOutput.WriteStringStringMap(userData);
        // Writes an invalid checksum; FinishCommit later makes it valid.
        segnOutput.PrepareCommit();
        success = true;
        // Hand the open output to FinishCommit/RollbackCommit.
        pendingSegnOutput = segnOutput;
    }
    finally
    {
        if (!success)
        {
            // We hit an exception above; try to close the file
            // but suppress any exception:
            try
            {
                segnOutput.Close();
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
            try
            {
                // Try not to leave a truncated segments_N file in
                // the index:
                directory.DeleteFile(segmentFileName);
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
        }
    }
}
/// <summary>Basic ctor: pre-lockless defaults — deletion gen NO,
/// compound-file state left as CHECK_DIR (resolved later by probing the
/// directory), no shared doc store (offset -1).
/// </summary>
public SegmentInfo(System.String name, int docCount, Directory dir)
{
    this.name = name;
    this.docCount = docCount;
    this.dir = dir;
    delGen = NO;
    // CHECK_DIR sentinel: compound-file state not yet known.
    isCompoundFile = (sbyte) (CHECK_DIR);
    preLockless = true;
    hasSingleNormFile = false;
    docStoreOffset = - 1;
    docStoreSegment = name;
    docStoreIsCompoundFile = false;
    delCount = 0;
    hasProx = true;
}
/// <summary> Get the generation (N) of the current segments_N file
/// in the directory.
/// </summary>
/// <param name="directory">-- directory to search for the latest segments_N file</param>
public static long GetCurrentSegmentGeneration(Directory directory)
{
    System.String[] files;
    try
    {
        files = directory.ListAll();
    }
    catch (NoSuchDirectoryException)
    {
        // No directory means no segments file.
        return -1;
    }
    return GetCurrentSegmentGeneration(files);
}
/// <summary> Get the filename of the current segments_N file
/// in the directory.
/// </summary>
/// <param name="directory">-- directory to search for the latest segments_N file</param>
public static System.String GetCurrentSegmentFileName(Directory directory)
{
    long gen = GetCurrentSegmentGeneration(directory);
    return IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);
}
/// <summary>Convenience ctor: delegates to the full ctor with no shared
/// doc store (offset -1, null segment, not compound) and hasProx true.
/// </summary>
public SegmentInfo(System.String name, int docCount, Directory dir, bool isCompoundFile, bool hasSingleNormFile):this(name, docCount, dir, isCompoundFile, hasSingleNormFile, - 1, null, false, true)
{
}
/// <summary> Initialize the deleter: find all previous commits in
/// the Directory, incref the files they reference, call
/// the policy to let it delete commits. This will remove
/// any files not referenced by any of the commits.
/// </summary>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos, System.IO.StreamWriter infoStream, DocumentsWriter docWriter, System.Collections.Generic.Dictionary<string, string> synced)
{
    this.docWriter = docWriter;
    this.infoStream = infoStream;
    this.synced = synced;

    if (infoStream != null)
    {
        Message("init: current segments file is \"" + segmentInfos.GetCurrentSegmentFileName() + "\"; deletionPolicy=" + policy);
    }

    this.policy = policy;
    this.directory = directory;

    // First pass: walk the files and initialize our ref
    // counts:
    long currentGen = segmentInfos.GetGeneration();
    IndexFileNameFilter filter = IndexFileNameFilter.GetFilter();

    System.String[] files = directory.ListAll();

    CommitPoint currentCommitPoint = null;

    for (int i = 0; i < files.Length; i++)
    {
        System.String fileName = files[i];

        // segments.gen is managed separately and never ref-counted.
        if (filter.Accept(null, fileName) && !fileName.Equals(IndexFileNames.SEGMENTS_GEN))
        {
            // Add this file to refCounts with initial count 0:
            GetRefCount(fileName);

            if (fileName.StartsWith(IndexFileNames.SEGMENTS))
            {
                // This is a commit (segments or segments_N), and
                // it's valid (<= the max gen). Load it, then
                // incref all files it refers to:
                if (infoStream != null)
                {
                    Message("init: load commit \"" + fileName + "\"");
                }
                SegmentInfos sis = new SegmentInfos();
                try
                {
                    sis.Read(directory, fileName);
                }
                catch (System.IO.FileNotFoundException e)
                {
                    // LUCENE-948: on NFS (and maybe others), if
                    // you have writers switching back and forth
                    // between machines, it's very likely that the
                    // dir listing will be stale and will claim a
                    // file segments_X exists when in fact it
                    // doesn't. So, we catch this and handle it
                    // as if the file does not exist
                    if (infoStream != null)
                    {
                        Message("init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
                    }
                    sis = null;
                }
                catch (System.IO.IOException e)
                {
                    if (SegmentInfos.GenerationFromSegmentsFileName(fileName) <= currentGen)
                    {
                        throw e;
                    }
                    else
                    {
                        // Most likely we are opening an index that
                        // has an aborted "future" commit, so suppress
                        // exc in this case
                        sis = null;
                    }
                }
                if (sis != null)
                {
                    CommitPoint commitPoint = new CommitPoint(this,commitsToDelete, directory, sis);
                    if (sis.GetGeneration() == segmentInfos.GetGeneration())
                    {
                        currentCommitPoint = commitPoint;
                    }
                    commits.Add(commitPoint);
                    IncRef(sis, true);

                    // Track the newest commit we managed to load.
                    if (lastSegmentInfos == null || sis.GetGeneration() > lastSegmentInfos.GetGeneration())
                    {
                        lastSegmentInfos = sis;
                    }
                }
            }
        }
    }

    if (currentCommitPoint == null)
    {
        // We did not in fact see the segments_N file
        // corresponding to the segmentInfos that was passed
        // in. Yet, it must exist, because our caller holds
        // the write lock. This can happen when the directory
        // listing was stale (eg when index accessed via NFS
        // client with stale directory listing cache). So we
        // try now to explicitly open this commit point:
        SegmentInfos sis = new SegmentInfos();
        try
        {
            sis.Read(directory, segmentInfos.GetCurrentSegmentFileName());
        }
        catch (System.IO.IOException e)
        {
            throw new CorruptIndexException("failed to locate current segments_N file");
        }
        if (infoStream != null)
            Message("forced open of current segments file " + segmentInfos.GetCurrentSegmentFileName());
        currentCommitPoint = new CommitPoint(this, commitsToDelete, directory, sis);
        commits.Add(currentCommitPoint);
        IncRef(sis, true);
    }

    // We keep commits list in sorted order (oldest to newest):
    commits.Sort();

    // Now delete anything with ref count at 0. These are
    // presumably abandoned files eg due to crash of
    // IndexWriter.
    System.Collections.Generic.IEnumerator<System.Collections.Generic.KeyValuePair<System.String, RefCount>> it = refCounts.GetEnumerator();
    while (it.MoveNext())
    {
        System.String fileName = (System.String) it.Current.Key;
        RefCount rc = (RefCount) refCounts[fileName];
        if (0 == rc.count)
        {
            if (infoStream != null)
            {
                Message("init: removing unreferenced file \"" + fileName + "\"");
            }
            DeleteFile(fileName);
        }
    }

    // Finally, give policy a chance to remove things on
    // startup:
    policy.OnInit(commits);

    // Always protect the incoming segmentInfos since
    // sometime it may not be the most recent commit
    Checkpoint(segmentInfos, false);

    startingCommitDeleted = currentCommitPoint.IsDeleted();

    DeleteCommits();
}
/// <summary>Full ctor: delegates basic fields to the 3-arg ctor, then sets
/// the explicit (post-lockless) compound-file flag, norm-file layout and
/// shared doc-store fields.
/// </summary>
public SegmentInfo(System.String name, int docCount, Directory dir, bool isCompoundFile, bool hasSingleNormFile, int docStoreOffset, System.String docStoreSegment, bool docStoreIsCompoundFile, bool hasProx):this(name, docCount, dir)
{
    // Explicit YES/NO overrides the 3-arg ctor's CHECK_DIR sentinel.
    this.isCompoundFile = (sbyte) (isCompoundFile?YES:NO);
    this.hasSingleNormFile = hasSingleNormFile;
    preLockless = false;
    this.docStoreOffset = docStoreOffset;
    this.docStoreSegment = docStoreSegment;
    this.docStoreIsCompoundFile = docStoreIsCompoundFile;
    this.hasProx = hasProx;
    delCount = 0;
    // A segment using a shared doc store (offset != -1) must name it.
    System.Diagnostics.Debug.Assert(docStoreOffset == - 1 || docStoreSegment != null, "dso=" + docStoreOffset + " dss=" + docStoreSegment + " docCount=" + docCount);
}
/// <summary>Returns all file names referenced by SegmentInfo
/// instances matching the provided Directory (ie files
/// associated with any "external" segments are skipped).
/// The returned collection is recomputed on each
/// invocation.
/// </summary>
public System.Collections.Generic.ICollection<string> Files(Directory dir, bool includeSegmentsFile)
{
    System.Collections.Generic.Dictionary<string, string> result = new System.Collections.Generic.Dictionary<string, string>();

    if (includeSegmentsFile)
    {
        string currentSegments = GetCurrentSegmentFileName();
        result.Add(currentSegments, currentSegments);
    }

    int numSegments = Count;
    for (int i = 0; i < numSegments; i++)
    {
        SegmentInfo segInfo = Info(i);
        if (segInfo.dir == dir)
        {
            // External segments (other directories) are skipped.
            SupportClass.CollectionsHelper.AddAllIfNotContains(result, segInfo.Files());
        }
    }

    return result.Keys;
}
/// <summary> Read a particular segmentFileName. Note that this may
/// throw an IOException if a commit is in process.
/// </summary>
/// <param name="directory">-- directory containing the segments file</param>
/// <param name="segmentFileName">-- segment file to load</param>
/// <throws> CorruptIndexException if the index is corrupt </throws>
/// <throws> IOException if there is a low-level IO error </throws>
public void Read(Directory directory, System.String segmentFileName)
{
    bool success = false;

    // Clear any previous segments:
    Clear();

    ChecksumIndexInput input = new ChecksumIndexInput(directory.OpenInput(segmentFileName));

    generation = GenerationFromSegmentsFileName(segmentFileName);

    lastGeneration = generation;

    try
    {
        int format = input.ReadInt();
        // A negative first int is a format marker; a non-negative one
        // means the old headerless layout where it is the counter.
        if (format < 0)
        {
            // file contains explicit format info
            // check that it is a format we can understand
            // (format constants decrease with newer versions)
            if (format < CURRENT_FORMAT)
                throw new CorruptIndexException("Unknown format version: " + format);
            version = input.ReadLong(); // read version
            counter = input.ReadInt(); // read counter
        }
        else
        {
            // file is in old format without explicit format info
            counter = format;
        }

        for (int i = input.ReadInt(); i > 0; i--)
        {
            // read segmentInfos
            Add(new SegmentInfo(directory, format, input));
        }

        if (format >= 0)
        {
            // in old format the version number may be at the end of the file
            if (input.GetFilePointer() >= input.Length())
                version = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond); // old file format without version number
            else
                version = input.ReadLong(); // read version
        }

        if (format <= FORMAT_USER_DATA)
        {
            if (format <= FORMAT_DIAGNOSTICS)
            {
                userData = input.ReadStringStringMap();
            }
            else if (0 != input.ReadByte())
            {
                // Single legacy user-data string stored under key "userData".
                userData = new System.Collections.Generic.Dictionary<string,string>();
                userData.Add("userData", input.ReadString());
            }
            else
            {
                userData = new System.Collections.Generic.Dictionary<string, string>();
            }
        }
        else
        {
            userData = new System.Collections.Generic.Dictionary<string, string>();
        }

        if (format <= FORMAT_CHECKSUM)
        {
            long checksumNow = input.GetChecksum();
            long checksumThen = input.ReadLong();
            if (checksumNow != checksumThen)
                throw new CorruptIndexException("checksum mismatch in segments file");
        }
        success = true;
    }
    finally
    {
        input.Close();
        if (!success)
        {
            // Clear any segment infos we had loaded so we
            // have a clean slate on retry:
            Clear();
        }
    }
}
/// <summary>Writes & syncs to the Directory dir, taking care to
/// remove the segments file on exception. Equivalent to running both
/// phases of the two-phase commit back to back.
/// </summary>
public /*internal*/ void Commit(Directory dir)
{
    // Phase 1: write segments_N with an invalid checksum.
    PrepareCommit(dir);
    // Phase 2: finish the checksum, sync, and publish the generation.
    FinishCommit(dir);
}
/// <summary>Phase two of the two-phase commit: completes the pending
/// segments output's checksum, syncs the file (deleting it if sync
/// fails), publishes lastGeneration, and best-effort rewrites
/// segments.gen. Rolls back automatically if finishing the output fails.
/// </summary>
internal void FinishCommit(Directory dir)
{
    if (pendingSegnOutput == null)
        throw new System.SystemException("prepareCommit was not called");
    bool success = false;
    try
    {
        // Writes the valid checksum, making the file readable.
        pendingSegnOutput.FinishCommit();
        pendingSegnOutput.Close();
        pendingSegnOutput = null;
        success = true;
    }
    finally
    {
        if (!success)
            RollbackCommit(dir);
    }

    // NOTE: if we crash here, we have left a segments_N
    // file in the directory in a possibly corrupt state (if
    // some bytes made it to stable storage and others
    // didn't). But, the segments_N file includes checksum
    // at the end, which should catch this case. So when a
    // reader tries to read it, it will throw a
    // CorruptIndexException, which should cause the retry
    // logic in SegmentInfos to kick in and load the last
    // good (previous) segments_N-1 file.

    System.String fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
    success = false;
    try
    {
        dir.Sync(fileName);
        success = true;
    }
    finally
    {
        if (!success)
        {
            try
            {
                // Don't leave an unsynced segments_N behind.
                dir.DeleteFile(fileName);
            }
            catch (System.Exception t)
            {
                // Suppress so we keep throwing the original exception
            }
        }
    }

    lastGeneration = generation;

    try
    {
        IndexOutput genOutput = dir.CreateOutput(IndexFileNames.SEGMENTS_GEN);
        try
        {
            genOutput.WriteInt(FORMAT_LOCKLESS);
            // Generation is written twice — readers can detect a torn write.
            genOutput.WriteLong(generation);
            genOutput.WriteLong(generation);
        }
        finally
        {
            genOutput.Close();
        }
    }
    catch (System.Exception t)
    {
        // It's OK if we fail to write this file since it's
        // used only as one of the retry fallbacks.
    }
}