/// <summary>
/// Returns true if this single info is optimized (has no pending norms or
/// deletes, is in the same dir as the writer, and matches the current
/// compound file setting).
/// </summary>
private bool IsOptimized(SegmentInfo info, IState state)
{
    // Segments with any deleted documents are never considered optimized.
    if (writer.NumDeletedDocs(info, state) > 0)
        return false;

    // Separate norm files likewise disqualify the segment; beyond that it
    // must live in the writer's directory and agree with the compound-file
    // setting (a mismatch is tolerated when the no-CFS ratio permits one).
    return !info.HasSeparateNorms(state)
        && info.dir == writer.Directory
        && (info.GetUseCompoundFile(state) == useCompoundFile || internalNoCFSRatio < 1.0);
}
/// <summary>
/// Returns true if this single info is optimized (has no pending norms or
/// deletes, is in the same dir as the writer, and matches the current
/// compound file setting).
/// </summary>
private bool IsOptimized(SegmentInfo info)
{
    // Any deleted document disqualifies the segment outright.
    bool hasDeletions = writer.NumDeletedDocs(info) > 0;
    if (hasDeletions || info.HasSeparateNorms())
        return false;
    if (info.dir != writer.GetDirectory())
        return false;
    // A compound-file mismatch is acceptable when the no-CFS ratio allows it.
    return info.GetUseCompoundFile() == useCompoundFile || noCFSRatio < 1.0;
}
/// <summary>
/// Opens a Norm for every indexed, non-omitted field, reusing any norm
/// instances already present in <c>norms</c> (e.g. when this SegmentReader
/// is being re-opened). Norms stored in the shared norms file are read at
/// successive offsets of maxDoc bytes after the header; norms in their own
/// (separate) file start at offset 0.
/// </summary>
/// <param name="cfsDir">directory to read non-separate norm files from,
/// typically the compound-file directory</param>
/// <param name="readBufferSize">buffer size used when opening the shared
/// norms stream</param>
private void OpenNorms(Directory cfsDir, int readBufferSize)
{
    long nextNormSeek = SegmentMerger.NORMS_HEADER.Length; //skip header (header unused for now)
    int maxDoc = MaxDoc();
    for (int i = 0; i < fieldInfos.Size(); i++)
    {
        FieldInfo fi = fieldInfos.FieldInfo(i);
        if (norms.Contains(fi.name))
        {
            // in case this SegmentReader is being re-opened, we might be able to
            // reuse some norm instances and skip loading them here
            // NOTE(review): this also skips the nextNormSeek increment below;
            // presumably reused norms never share the single norms file with
            // norms loaded here — confirm against the reopen path.
            continue;
        }
        if (fi.isIndexed && !fi.omitNorms)
        {
            Directory d = Directory();
            System.String fileName = si.GetNormFileName(fi.number);
            if (!si.HasSeparateNorms(fi.number))
            {
                // No separate norm file for this field: read from cfsDir.
                d = cfsDir;
            }

            // singleNormFile means multiple norms share this file
            bool singleNormFile = fileName.EndsWith("." + IndexFileNames.NORMS_EXTENSION);
            IndexInput normInput = null;
            long normSeek;

            if (singleNormFile)
            {
                normSeek = nextNormSeek;
                if (singleNormStream == null)
                {
                    // First field using the shared file: open it once.
                    singleNormStream = d.OpenInput(fileName, readBufferSize);
                }
                // All norms in the .nrm file can share a single IndexInput since
                // they are only used in a synchronized context.
                // If this were to change in the future, a clone could be done here.
                normInput = singleNormStream;
            }
            else
            {
                // Separate per-field norm file: open its own input at offset 0.
                normSeek = 0;
                normInput = d.OpenInput(fileName);
            }

            norms[fi.name] = new Norm(this, normInput, singleNormFile, fi.number, normSeek);
            nextNormSeek += maxDoc; // increment also if some norms are separate
        }
    }
}
/// <summary>
/// Loads a Norm for each indexed field that stores norms. Fields whose norms
/// live in the shared norms file are read at successive offsets of maxDoc
/// bytes (after the header); norms in their own file start at offset 0.
/// </summary>
/// <param name="cfsDir">directory holding the non-separate norm files,
/// typically the compound-file directory</param>
private void OpenNorms(Directory cfsDir)
{
    long nextNormSeek = SegmentMerger.NORMS_HEADER.Length; // skip header (header unused for now)
    int maxDoc = MaxDoc();
    int numFields = fieldInfos.Size();
    for (int fieldIdx = 0; fieldIdx < numFields; fieldIdx++)
    {
        FieldInfo fi = fieldInfos.FieldInfo(fieldIdx);
        if (!fi.isIndexed || fi.omitNorms)
            continue; // field carries no norms

        Directory d = Directory();
        System.String fileName = si.GetNormFileName(fi.number);
        if (!si.HasSeparateNorms(fi.number))
        {
            // No separate (updated) norm file: read from the compound dir.
            d = cfsDir;
        }

        bool inSharedFile = fileName.EndsWith("." + IndexFileNames.NORMS_EXTENSION);
        long normSeek = inSharedFile ? nextNormSeek : 0;
        norms[fi.name] = new Norm(this, d.OpenInput(fileName), fi.number, normSeek);
        nextNormSeek += maxDoc; // increment also if some norms are separate
    }
}
/// <summary>
/// Convenience wrapper: asks the segment itself whether it has separate
/// norm files.
/// </summary>
internal static bool HasSeparateNorms(SegmentInfo si)
{
    bool separate = si.HasSeparateNorms();
    return separate;
}
/// <summary>
/// Returns true if this single info is optimized (has no pending norms or
/// deletes, is in the same dir as the writer, and matches the current
/// compound file setting).
/// </summary>
private bool IsOptimized(IndexWriter writer, SegmentInfo info)
{
    // Deletions or separate norm files immediately rule the segment out.
    bool clean = !info.HasDeletions() && !info.HasSeparateNorms();
    return clean
        && info.dir == writer.GetDirectory()
        && info.GetUseCompoundFile() == useCompoundFile;
}
/// <summary>
/// Delegates the separate-norms check to the SegmentInfo itself.
/// </summary>
internal static bool HasSeparateNorms(SegmentInfo si)
{
    return si.HasSeparateNorms();
}
/// <summary>
/// Re-opens this reader against the given SegmentInfo for the same segment.
/// If neither the deletions nor any field's norms changed, this reader itself
/// is returned; otherwise a clone is returned that shares the unchanged
/// resources (with their ref-counts bumped) and reloads only what differs.
/// </summary>
internal virtual SegmentReader ReopenSegment(SegmentInfo si)
{
    lock (this)
    {
        // Deletions are current when both infos agree on whether deletions
        // exist and, if they do, name the same deletions file.
        bool deletionsUpToDate = (this.si.HasDeletions() == si.HasDeletions()) && (!si.HasDeletions() || this.si.GetDelFileName().Equals(si.GetDelFileName()));

        // A field whose norm file name differs has updated (separate) norms.
        bool normsUpToDate = true;
        bool[] fieldNormsChanged = new bool[fieldInfos.Size()];
        if (normsUpToDate)
        {
            for (int i = 0; i < fieldInfos.Size(); i++)
            {
                if (!this.si.GetNormFileName(i).Equals(si.GetNormFileName(i)))
                {
                    normsUpToDate = false;
                    fieldNormsChanged[i] = true;
                }
            }
        }

        // Nothing changed: the caller keeps using this reader.
        if (normsUpToDate && deletionsUpToDate)
        {
            return(this);
        }

        // clone reader
        SegmentReader clone;
        if (readOnly)
        {
            clone = new ReadOnlySegmentReader();
        }
        else
        {
            clone = new SegmentReader();
        }

        bool success = false;
        try
        {
            // Share reader-wide state with the clone.
            clone.readOnly = readOnly;
            clone.directory = directory;
            clone.si = si;
            clone.segment = segment;
            clone.readBufferSize = readBufferSize;
            clone.cfsReader = cfsReader;
            clone.storeCFSReader = storeCFSReader;
            clone.fieldInfos = fieldInfos;
            clone.tis = tis;
            clone.freqStream = freqStream;
            clone.proxStream = proxStream;
            clone.termVectorsReaderOrig = termVectorsReaderOrig;

            // we have to open a new FieldsReader, because it is not thread-safe
            // and can thus not be shared among multiple SegmentReaders
            // TODO: Change this in case FieldsReader becomes thread-safe in the future
            System.String fieldsSegment;
            Directory storeDir = Directory();

            // Pick the directory/segment the stored fields live in: either a
            // shared doc store or this segment's own (possibly compound) files.
            if (si.GetDocStoreOffset() != -1)
            {
                fieldsSegment = si.GetDocStoreSegment();
                if (storeCFSReader != null)
                {
                    storeDir = storeCFSReader;
                }
            }
            else
            {
                fieldsSegment = segment;
                if (cfsReader != null)
                {
                    storeDir = cfsReader;
                }
            }

            if (fieldsReader != null)
            {
                clone.fieldsReader = new FieldsReader(storeDir, fieldsSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
            }

            if (!deletionsUpToDate)
            {
                // load deleted docs
                clone.deletedDocs = null;
                clone.LoadDeletedDocs();
            }
            else
            {
                // Deletions unchanged: share the existing deleted-docs state.
                clone.deletedDocs = this.deletedDocs;
            }

            clone.norms = new System.Collections.Hashtable();
            if (!normsUpToDate)
            {
                // load norms
                for (int i = 0; i < fieldNormsChanged.Length; i++)
                {
                    // copy unchanged norms to the cloned reader and incRef those norms
                    if (!fieldNormsChanged[i])
                    {
                        System.String curField = fieldInfos.FieldInfo(i).name;
                        Norm norm = (Norm)this.norms[curField];
                        norm.IncRef();
                        clone.norms[curField] = norm;
                    }
                }
                // Changed norms are loaded from disk; OpenNorms skips fields
                // already present in clone.norms.
                clone.OpenNorms(si.GetUseCompoundFile() ? cfsReader : Directory(), readBufferSize);
            }
            else
            {
                // All norms unchanged: share every Norm instance, bumping refs.
                System.Collections.IEnumerator it = norms.Keys.GetEnumerator();
                while (it.MoveNext())
                {
                    System.String field = (System.String)it.Current;
                    Norm norm = (Norm)norms[field];
                    norm.IncRef();
                    clone.norms[field] = norm;
                }
            }

            if (clone.singleNormStream == null)
            {
                // Make sure the clone has its own stream for the shared norms
                // file (used by fields without separate norm files).
                for (int i = 0; i < fieldInfos.Size(); i++)
                {
                    FieldInfo fi = fieldInfos.FieldInfo(i);
                    if (fi.isIndexed && !fi.omitNorms)
                    {
                        Directory d = si.GetUseCompoundFile() ? cfsReader : Directory();
                        System.String fileName = si.GetNormFileName(fi.number);
                        if (si.HasSeparateNorms(fi.number))
                        {
                            continue;
                        }
                        if (fileName.EndsWith("." + IndexFileNames.NORMS_EXTENSION))
                        {
                            clone.singleNormStream = d.OpenInput(fileName, readBufferSize);
                            break;
                        }
                    }
                }
            }

            success = true;
        }
        finally
        {
            if (this.referencedSegmentReader != null)
            {
                // this reader shares resources with another SegmentReader,
                // so we increment the other readers refCount. We don't
                // increment the refCount of the norms because we did
                // that already for the shared norms
                clone.referencedSegmentReader = this.referencedSegmentReader;
                referencedSegmentReader.IncRefReaderNotNorms();
            }
            else
            {
                // this reader wasn't reopened, so we increment this
                // readers refCount
                clone.referencedSegmentReader = this;
                IncRefReaderNotNorms();
            }
            if (!success)
            {
                // An exception occurred during reopen; we have to decRef the
                // norms we incRef'ed already and close singleNormStream and
                // the FieldsReader.
                clone.DecRef();
            }
        }
        return(clone);
    }
}
/// <summary>
/// Returns true if this single info is optimized (has no pending norms or
/// deletes, is in the same dir as the writer, and matches the current
/// compound file setting).
/// </summary>
private bool IsOptimized(SegmentInfo info)
{
    int numDeleted = writer.NumDeletedDocs(info);
    // Short-circuits left to right: deletions, separate norms, directory,
    // then the compound-file setting (with the no-CFS ratio escape hatch).
    bool optimized = numDeleted == 0
        && !info.HasSeparateNorms()
        && info.dir == writer.GetDirectory()
        && (info.GetUseCompoundFile() == useCompoundFile || noCFSRatio < 1.0);
    return optimized;
}
/// <summary>
/// Returns true if this single info is optimized (has no pending norms or
/// deletes, is in the same dir as the writer, and matches the current
/// compound file setting).
/// </summary>
private bool IsOptimized(IndexWriter writer, SegmentInfo info)
{
    if (info.HasDeletions())
        return false;
    if (info.HasSeparateNorms())
        return false;
    if (info.dir != writer.GetDirectory())
        return false;
    return info.GetUseCompoundFile() == useCompoundFile;
}
/// <summary>
/// Re-opens this reader against the given SegmentInfo for the same segment.
/// Returns this reader unchanged when deletions and norms are both current;
/// otherwise returns a clone sharing unchanged resources (ref-counted) and
/// reloading only what differs.
/// </summary>
internal virtual SegmentReader ReopenSegment(SegmentInfo si)
{
    lock (this)
    {
        // Deletions are up to date when both infos agree on whether deletions
        // exist and, if so, on the deletions file name.
        bool deletionsUpToDate = (this.si.HasDeletions() == si.HasDeletions()) && (!si.HasDeletions() || this.si.GetDelFileName().Equals(si.GetDelFileName()));

        // Record per-field whether the norm file name changed.
        bool normsUpToDate = true;
        bool[] fieldNormsChanged = new bool[fieldInfos.Size()];
        if (normsUpToDate)
        {
            for (int i = 0; i < fieldInfos.Size(); i++)
            {
                if (!this.si.GetNormFileName(i).Equals(si.GetNormFileName(i)))
                {
                    normsUpToDate = false;
                    fieldNormsChanged[i] = true;
                }
            }
        }

        // No changes at all: reuse this reader.
        if (normsUpToDate && deletionsUpToDate)
        {
            return this;
        }

        // clone reader
        SegmentReader clone;
        if (readOnly)
            clone = new ReadOnlySegmentReader();
        else
            clone = new SegmentReader();

        bool success = false;
        try
        {
            // Copy reader-wide shared state into the clone.
            clone.readOnly = readOnly;
            clone.directory = directory;
            clone.si = si;
            clone.segment = segment;
            clone.readBufferSize = readBufferSize;
            clone.cfsReader = cfsReader;
            clone.storeCFSReader = storeCFSReader;
            clone.fieldInfos = fieldInfos;
            clone.tis = tis;
            clone.freqStream = freqStream;
            clone.proxStream = proxStream;
            clone.termVectorsReaderOrig = termVectorsReaderOrig;

            // we have to open a new FieldsReader, because it is not thread-safe
            // and can thus not be shared among multiple SegmentReaders
            // TODO: Change this in case FieldsReader becomes thread-safe in the future
            System.String fieldsSegment;
            Directory storeDir = Directory();

            // Determine where the stored fields live: a shared doc store or
            // this segment's own (possibly compound) files.
            if (si.GetDocStoreOffset() != - 1)
            {
                fieldsSegment = si.GetDocStoreSegment();
                if (storeCFSReader != null)
                {
                    storeDir = storeCFSReader;
                }
            }
            else
            {
                fieldsSegment = segment;
                if (cfsReader != null)
                {
                    storeDir = cfsReader;
                }
            }

            if (fieldsReader != null)
            {
                clone.fieldsReader = new FieldsReader(storeDir, fieldsSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
            }

            if (!deletionsUpToDate)
            {
                // load deleted docs
                clone.deletedDocs = null;
                clone.LoadDeletedDocs();
            }
            else
            {
                // Deletions unchanged: share the current deleted-docs state.
                clone.deletedDocs = this.deletedDocs;
            }

            clone.norms = new System.Collections.Hashtable();
            if (!normsUpToDate)
            {
                // load norms
                for (int i = 0; i < fieldNormsChanged.Length; i++)
                {
                    // copy unchanged norms to the cloned reader and incRef those norms
                    if (!fieldNormsChanged[i])
                    {
                        System.String curField = fieldInfos.FieldInfo(i).name;
                        Norm norm = (Norm) this.norms[curField];
                        norm.IncRef();
                        clone.norms[curField] = norm;
                    }
                }
                // Remaining (changed) norms are read from disk; OpenNorms
                // skips fields already placed in clone.norms above.
                clone.OpenNorms(si.GetUseCompoundFile() ? cfsReader : Directory(), readBufferSize);
            }
            else
            {
                // All norms current: share every Norm instance, bumping refs.
                System.Collections.IEnumerator it = norms.Keys.GetEnumerator();
                while (it.MoveNext())
                {
                    System.String field = (System.String) it.Current;
                    Norm norm = (Norm) norms[field];
                    norm.IncRef();
                    clone.norms[field] = norm;
                }
            }

            if (clone.singleNormStream == null)
            {
                // Give the clone its own stream for the shared norms file
                // (fields without separate norm files).
                for (int i = 0; i < fieldInfos.Size(); i++)
                {
                    FieldInfo fi = fieldInfos.FieldInfo(i);
                    if (fi.isIndexed && !fi.omitNorms)
                    {
                        Directory d = si.GetUseCompoundFile() ? cfsReader : Directory();
                        System.String fileName = si.GetNormFileName(fi.number);
                        if (si.HasSeparateNorms(fi.number))
                        {
                            continue;
                        }
                        if (fileName.EndsWith("." + IndexFileNames.NORMS_EXTENSION))
                        {
                            clone.singleNormStream = d.OpenInput(fileName, readBufferSize);
                            break;
                        }
                    }
                }
            }

            success = true;
        }
        finally
        {
            if (this.referencedSegmentReader != null)
            {
                // this reader shares resources with another SegmentReader,
                // so we increment the other readers refCount. We don't
                // increment the refCount of the norms because we did
                // that already for the shared norms
                clone.referencedSegmentReader = this.referencedSegmentReader;
                referencedSegmentReader.IncRefReaderNotNorms();
            }
            else
            {
                // this reader wasn't reopened, so we increment this
                // readers refCount
                clone.referencedSegmentReader = this;
                IncRefReaderNotNorms();
            }
            if (!success)
            {
                // An exception occurred during reopen; we have to decRef the
                // norms that we incRef'ed already and close singleNormStream
                // and the FieldsReader.
                clone.DecRef();
            }
        }
        return clone;
    }
}