/// <summary>Creates a term-docs enumerator over the given reader's postings.
/// The frequency stream is cloned so this enumerator can seek without
/// disturbing the file position of other enumerators on the same reader.</summary>
public /*internal*/ SegmentTermDocs(SegmentReader parent)
{
    this.parent = parent;
    this.deletedDocs = parent.deletedDocs;
    this.skipInterval = parent.tis.GetSkipInterval();
    // Clone last: independent stream position per enumerator instance.
    this.freqStream = (InputStream) parent.freqStream.Clone();
}
/// <summary>Reads the segments file from the directory and opens a reader
/// over it: a single SegmentReader when the index is optimized (exactly one
/// segment), otherwise a MultiReader wrapping one SegmentReader per segment.</summary>
public override System.Object DoBody()
{
    SegmentInfos infos = new SegmentInfos();
    infos.Read(directory);

    // Optimized index: one segment, no MultiReader indirection needed.
    if (infos.Count == 1)
    {
        return new SegmentReader(infos, infos.Info(0), closeDirectory);
    }

    Monodoc.Lucene.Net.Index.IndexReader[] subReaders = new Monodoc.Lucene.Net.Index.IndexReader[infos.Count];
    for (int i = 0; i < subReaders.Length; i++)
    {
        subReaders[i] = new SegmentReader(infos.Info(i));
    }
    return new MultiReader(directory, infos, closeDirectory, subReaders);
}
/// <summary>Wraps a norms stream for field <paramref name="number"/> and
/// links this Norm back to its enclosing SegmentReader.</summary>
public Norm(SegmentReader enclosingInstance, InputStream in_Renamed, int number)
{
    InitBlock(enclosingInstance);
    this.number = number;
    this.in_Renamed = in_Renamed;
}
// Stores the reference back to the outer SegmentReader instance
// (converter-generated outer-class wiring).
private void InitBlock(SegmentReader enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
}
/// <summary>Creates a term-positions enumerator for the given reader,
/// cloning its proximity stream so this instance can seek independently.</summary>
internal SegmentTermPositions(SegmentReader p) : base(p)
{
    // Clone keeps this enumerator's prox file position independent of others.
    this.proxStream = (InputStream) parent.proxStream.Clone();
}
/// <summary>Pops segments off of segmentInfos stack down to minSegment, merges them,
/// and pushes the merged index onto the top of the segmentInfos stack.
/// </summary>
/// <param name="minSegment">index of the first segment (in segmentInfos) to include in the merge;
/// all segments from this index to the end of the stack are merged</param>
private void MergeSegments(int minSegment)
{
    System.String mergedName = NewSegmentName();
    if (infoStream != null)
        infoStream.Write("merging segments");
    SegmentMerger merger = new SegmentMerger(directory, mergedName, useCompoundFile);
    // Readers whose files we own (this directory or our RAM directory) are
    // collected here and handed to the commit-lock runner below.
    System.Collections.ArrayList segmentsToDelete = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
    for (int i = minSegment; i < segmentInfos.Count; i++)
    {
        SegmentInfo si = segmentInfos.Info(i);
        if (infoStream != null)
            infoStream.Write(" " + si.name + " (" + si.docCount + " docs)");
        Monodoc.Lucene.Net.Index.IndexReader reader = new SegmentReader(si);
        merger.Add(reader);
        if ((reader.Directory() == this.directory) || (reader.Directory() == this.ramDirectory))
            segmentsToDelete.Add(reader); // queue segment for deletion
    }
    int mergedDocCount = merger.Merge();
    if (infoStream != null)
    {
        infoStream.WriteLine(" into " + mergedName + " (" + mergedDocCount + " docs)");
    }
    // Replace the merged run of infos with a single info for the new segment.
    segmentInfos.RemoveRange(minSegment, segmentInfos.Count - minSegment); // pop old infos & add new
    segmentInfos.Add(new SegmentInfo(mergedName, mergedDocCount, directory));
    // close readers before we attempt to delete now-obsolete segments
    merger.CloseReaders();
    // Acquire the commit lock while writing the new segments file and
    // deleting obsolete segment files; the anonymous With-subclass runs
    // that critical section under the lock obtained from the directory.
    lock (directory)
    {
        // in- & inter-process sync
        new AnonymousClassWith2(segmentsToDelete, this, directory.MakeLock(IndexWriter.COMMIT_LOCK_NAME), COMMIT_LOCK_TIMEOUT).Run();
    }
}