/// <summary>
/// Creates a processor bound to its owning <c>DocumentsWriter</c> and the
/// downstream <c>DocFieldConsumer</c> chain.
/// </summary>
/// <param name="docWriter">Owning documents writer.</param>
/// <param name="consumer">Downstream consumer of per-field data.</param>
public DocFieldProcessor(DocumentsWriter docWriter, DocFieldConsumer consumer)
{
    this.consumer = consumer;
    this.docWriter = docWriter;
    // The consumer and the stored-fields writer must share one fieldInfos
    // table so field numbering stays consistent across the chain.
    consumer.SetFieldInfos(fieldInfos);
    fieldsWriter = new StoredFieldsWriter(docWriter, fieldInfos);
}
/// <summary>
/// Discards all buffered state: resets this instance, then aborts and
/// releases the underlying fields writer (if one exists) and rewinds the
/// last-written document id.
/// </summary>
public override void Abort()
{
    Reset();
    if (FieldsWriter == null)
    {
        return; // nothing was opened; reset alone is sufficient
    }
    FieldsWriter.Abort();
    FieldsWriter = null;
    LastDocID = 0;
}
/// <summary>
/// Lazily creates the codec's stored-fields writer for the current segment.
/// Safe to call repeatedly; only the first call under the lock initializes.
/// </summary>
/// <param name="context">IO context used to open the writer.</param>
private void InitFieldsWriter(IOContext context)
{
    // NOTE(review): locks on 'this' to stay mutually exclusive with sibling
    // members that do the same; a private lock object would be preferred but
    // would break that coordination, so the target is kept as-is.
    lock (this)
    {
        if (fieldsWriter != null)
        {
            return; // another thread already initialized the writer
        }
        fieldsWriter = codec.StoredFieldsFormat.FieldsWriter(docWriter.directory, docWriter.SegmentInfo, context);
        lastDocID = 0;
    }
}
/// <summary>
/// Aborts any in-progress stored-fields output: resets buffered state, then
/// tears down the fields writer and rewinds the last-written document id.
/// </summary>
public override void Abort()
{
    Reset();
    if (fieldsWriter == null)
    {
        return; // writer was never opened
    }
    fieldsWriter.Abort();
    fieldsWriter = null;
    lastDocID = 0;
}
/// <summary>
/// Lazily creates the codec's stored-fields writer for this segment; the
/// first call under the lock opens the writer and rewinds the doc counter.
/// </summary>
/// <param name="context">IO context used to open the writer.</param>
private void InitFieldsWriter(IOContext context)
{
    // NOTE(review): locks on 'this' — a private lock object is normally
    // preferred, but this presumably coordinates with other members that
    // also lock 'this'; confirm before changing the lock target.
    lock (this)
    {
        if (FieldsWriter == null)
        {
            FieldsWriter = Codec.StoredFieldsFormat().FieldsWriter(DocWriter.Directory, DocWriter.SegmentInfo, context);
            LastDocID = 0;
        }
    }
}
/// <summary>Merges the stored fields of all source readers into the target segment.</summary>
/// <returns> The number of documents in all of the readers </returns>
/// <exception cref="CorruptIndexException"> if the index is corrupt </exception>
/// <exception cref="IOException"> if there is a low-level IO error </exception>
private int MergeFields()
{
    // 'using' replaces the manual try/finally + Dispose pattern with
    // identical disposal semantics (writer is disposed even if Merge throws).
    using (StoredFieldsWriter fieldsWriter = Codec.StoredFieldsFormat().FieldsWriter(Directory, MergeState.SegmentInfo, Context))
    {
        return fieldsWriter.Merge(MergeState);
    }
}
/// <summary>Merges the stored fields of all source readers into the target segment.</summary>
/// <returns> The number of documents in all of the readers </returns>
/// <exception cref="CorruptIndexException"> if the index is corrupt </exception>
/// <exception cref="IOException"> if there is a low-level IO error </exception>
private int MergeFields()
{
    // 'using' replaces the manual try/finally + Dispose pattern with
    // identical disposal semantics (writer is disposed even if Merge throws).
    using (StoredFieldsWriter fieldsWriter = codec.StoredFieldsFormat.FieldsWriter(directory, mergeState.SegmentInfo, context))
    {
        return fieldsWriter.Merge(mergeState);
    }
}
/// <summary>
/// Lazily creates the codec's stored-fields writer for this segment; the
/// first call inside the monitor opens the writer and rewinds the doc counter.
/// </summary>
/// <param name="context">IO context used to open the writer.</param>
private void InitFieldsWriter(IOContext context)
{
    // UninterruptableMonitor.Enter/Exit stands in for lock(this) — presumably
    // to shield the critical section from thread interrupts; the Exit must
    // stay in 'finally' so the monitor is always released.
    UninterruptableMonitor.Enter(this);
    try
    {
        if (fieldsWriter == null)
        {
            fieldsWriter = codec.StoredFieldsFormat.FieldsWriter(docWriter.directory, docWriter.SegmentInfo, context);
            lastDocID = 0;
        }
    }
    finally
    {
        UninterruptableMonitor.Exit(this);
    }
}
/// <summary>
/// Shared constructor helper: records the enclosing writer this nested
/// instance belongs to (port of a Java inner-class back-reference).
/// </summary>
/// <param name="enclosingInstance">Owning <see cref="StoredFieldsWriter"/>.</param>
private void InitBlock(StoredFieldsWriter enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
}
/// <summary>
/// Shared constructor helper: records the enclosing writer and sets up the
/// per-document buffer that stored-field bytes are streamed into.
/// </summary>
/// <param name="enclosingInstance">Owning <see cref="StoredFieldsWriter"/>.</param>
private void InitBlock(StoredFieldsWriter enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
    // Buffer comes from the owning doc writer's allocator; fdt writes into it.
    buffer = enclosingInstance.docWriter.NewPerDocBuffer();
    fdt = new RAMOutputStream(buffer);
}
/// <summary>
/// Creates a per-document state holder tied to its enclosing writer;
/// all initialization is delegated to <c>InitBlock</c>.
/// </summary>
/// <param name="enclosingInstance">Owning <see cref="StoredFieldsWriter"/>.</param>
public PerDoc(StoredFieldsWriter enclosingInstance)
{
    InitBlock(enclosingInstance);
}
/// <summary>
/// Creates the per-thread stored-fields state, borrowing the doc state from
/// the per-thread field processor and a fields writer that buffers in memory.
/// </summary>
/// <param name="docFieldProcessorPerThread">Per-thread processor whose doc state is shared.</param>
/// <param name="storedFieldsWriter">Owning stored-fields writer.</param>
public StoredFieldsWriterPerThread(DocFieldProcessorPerThread docFieldProcessorPerThread, StoredFieldsWriter storedFieldsWriter)
{
    this.docState = docFieldProcessorPerThread.docState;
    this.storedFieldsWriter = storedFieldsWriter;
    // Null outputs: this per-thread writer only buffers documents; the
    // parent writer owns the real index/data outputs.
    localFieldsWriter = new FieldsWriter((IndexOutput)null, (IndexOutput)null, storedFieldsWriter.fieldInfos);
}
/// <summary>
/// Creates the per-thread stored-fields state around the supplied doc state
/// and a fields writer that buffers documents in memory.
/// </summary>
/// <param name="docState">Shared per-thread document state.</param>
/// <param name="storedFieldsWriter">Owning stored-fields writer.</param>
public StoredFieldsWriterPerThread(DocumentsWriter.DocState docState, StoredFieldsWriter storedFieldsWriter)
{
    this.docState = docState;
    this.storedFieldsWriter = storedFieldsWriter;
    // Null outputs: this per-thread writer only buffers documents; the
    // parent writer owns the real index/data outputs.
    localFieldsWriter = new FieldsWriter((IndexOutput)null, (IndexOutput)null, storedFieldsWriter.fieldInfos);
}
/// <summary>
/// Creates a per-document state holder tied to its enclosing writer
/// (port of a Java inner-class back-reference).
/// </summary>
/// <param name="enclosing_instance">Owning <see cref="StoredFieldsWriter"/>.</param>
internal PerDoc(StoredFieldsWriter enclosing_instance)
{
    this.enclosing_instance = enclosing_instance;
}
/// <summary>
/// Creates a documents writer bound to <paramref name="directory"/> and its
/// owning <paramref name="writer"/>, then assembles the full indexing chain
/// of consumers. The construction order below matters: each consumer is
/// handed to the one upstream of it.
/// </summary>
/// <param name="directory">Directory the segment files are written to.</param>
/// <param name="writer">Owning index writer (supplies similarity and doc count).</param>
internal DocumentsWriter(Directory directory, IndexWriter writer)
{
    this.directory = directory;
    this.writer = writer;
    this.similarity = writer.GetSimilarity();
    // Start the flushed-doc counter at the writer's current max doc.
    flushedDocCount = writer.MaxDoc();
    byteBlockAllocator = new ByteBlockAllocator(this);
    waitQueue = new WaitQueue(this);

    /* This is the current indexing chain:

       DocConsumer / DocConsumerPerThread
         --> code: DocFieldProcessor / DocFieldProcessorPerThread
           --> DocFieldConsumer / DocFieldConsumerPerThread / DocFieldConsumerPerField
             --> code: DocFieldConsumers / DocFieldConsumersPerThread / DocFieldConsumersPerField
               --> code: DocInverter / DocInverterPerThread / DocInverterPerField
                 --> InvertedDocConsumer / InvertedDocConsumerPerThread / InvertedDocConsumerPerField
                   --> code: TermsHash / TermsHashPerThread / TermsHashPerField
                     --> TermsHashConsumer / TermsHashConsumerPerThread / TermsHashConsumerPerField
                       --> code: FreqProxTermsWriter / FreqProxTermsWriterPerThread / FreqProxTermsWriterPerField
                       --> code: TermVectorsTermsWriter / TermVectorsTermsWriterPerThread / TermVectorsTermsWriterPerField
                 --> InvertedDocEndConsumer / InvertedDocConsumerPerThread / InvertedDocConsumerPerField
                   --> code: NormsWriter / NormsWriterPerThread / NormsWriterPerField
               --> code: StoredFieldsWriter / StoredFieldsWriterPerThread / StoredFieldsWriterPerField
    */

    // TODO FI: this should be something the user can pass in

    // Build up indexing chain:
    TermsHashConsumer termVectorsWriter = new TermVectorsTermsWriter(this);
    TermsHashConsumer freqProxWriter = new FreqProxTermsWriter();
    // Primary terms hash feeds the freq/prox writer; its nested secondary
    // hash feeds the term-vectors writer.
    InvertedDocConsumer termsHash = new TermsHash(this, true, freqProxWriter, new TermsHash(this, false, termVectorsWriter, null));
    NormsWriter normsWriter = new NormsWriter();
    DocInverter docInverter = new DocInverter(termsHash, normsWriter);
    StoredFieldsWriter fieldsWriter = new StoredFieldsWriter(this);
    DocFieldConsumers docFieldConsumers = new DocFieldConsumers(docInverter, fieldsWriter);
    // The field processor is both the chain entry point and kept separately
    // for direct access.
    consumer = docFieldProcessor = new DocFieldProcessor(this, docFieldConsumers);
}
/// <summary>
/// Builds per-thread stored-fields state over the given doc state; the local
/// fields writer buffers documents in memory for the owning writer to flush.
/// </summary>
/// <param name="docState">Shared per-thread document state.</param>
/// <param name="storedFieldsWriter">Owning stored-fields writer.</param>
public StoredFieldsWriterPerThread(DocumentsWriter.DocState docState, StoredFieldsWriter storedFieldsWriter)
{
    this.storedFieldsWriter = storedFieldsWriter;
    this.docState = docState;
    // Both index outputs are intentionally null — documents are buffered
    // locally rather than written straight to segment files.
    IndexOutput fdx = null;
    IndexOutput fdt = null;
    localFieldsWriter = new FieldsWriter(fdx, fdt, storedFieldsWriter.fieldInfos);
}