internal override void FinishDocument(TermsHash termsHash)
{
    // Flushes the current document's term vectors to the codec's
    // TermVectorsWriter, then resets per-document state for the next doc.
    //
    // LUCENENET: Debug.Assert is removed from Release builds by
    // [Conditional("DEBUG")], so the side-effecting TestPoint notifications it
    // wrapped would never fire outside Debug builds. Guard them with
    // Debugging.AssertsEnabled instead, matching the pattern already used
    // elsewhere in this file.
    if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled)
    {
        docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start");
    }

    if (!hasVectors)
    {
        return; // No field of this document buffered term vectors.
    }

    InitTermVectorsWriter();

    // Presumably writes empty entries for any preceding docIDs that had no
    // vectors, so docIDs stay dense — TODO confirm against Fill's definition.
    Fill(docState.docID);

    // Append term vectors to the real outputs:
    writer.StartDocument(numVectorFields);
    for (int i = 0; i < numVectorFields; i++)
    {
        perFields[i].FinishDocument();
    }
    writer.FinishDocument();

    // Invariant: Fill() must have advanced lastDocID to exactly this docID.
    Debug.Assert(lastDocID == docState.docID, "lastDocID=" + lastDocID + " docState.docID=" + docState.docID);

    lastDocID++;

    termsHash.Reset();
    Reset();

    if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled)
    {
        docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end");
    }
}
public override void FinishDocument(TermsHash termsHash)
{
    // Flushes the current document's term vectors to the codec's
    // TermVectorsWriter, then resets per-document state for the next doc.
    //
    // LUCENENET: Debug.Assert is removed from Release builds by
    // [Conditional("DEBUG")], so the side-effecting TestPoint notifications it
    // wrapped would never fire outside Debug builds. Guard them with
    // Debugging.AssertsEnabled instead, matching the pattern already used
    // elsewhere in this file.
    if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled)
    {
        DocWriter.TestPoint("TermVectorsTermsWriter.finishDocument start");
    }

    if (!HasVectors)
    {
        return; // No field of this document buffered term vectors.
    }

    InitTermVectorsWriter();

    // Presumably writes empty entries for any preceding docIDs that had no
    // vectors, so docIDs stay dense — TODO confirm against Fill's definition.
    Fill(DocState.DocID);

    // Append term vectors to the real outputs:
    Writer.StartDocument(NumVectorFields);
    for (int i = 0; i < NumVectorFields; i++)
    {
        PerFields[i].FinishDocument();
    }
    Writer.FinishDocument();

    // Invariant: Fill() must have advanced LastDocID to exactly this docID.
    Debug.Assert(LastDocID == DocState.DocID, "lastDocID=" + LastDocID + " docState.docID=" + DocState.DocID);

    LastDocID++;

    termsHash.Reset();
    Reset();

    if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled)
    {
        DocWriter.TestPoint("TermVectorsTermsWriter.finishDocument end");
    }
}
internal override void FinishDocument(TermsHash termsHash)
{
    // Flushes the current document's term vectors to the codec's
    // TermVectorsWriter, then resets per-document state for the next doc.

    // LUCENENET: .NET doesn't support asserts in release mode
    if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled)
    {
        docWriter.TestPoint("TermVectorsTermsWriter.finishDocument start");
    }

    if (!hasVectors)
    {
        return; // No field of this document buffered term vectors.
    }

    InitTermVectorsWriter();

    // Presumably writes empty entries for any preceding docIDs that had no
    // vectors, so docIDs stay dense — TODO confirm against Fill's definition.
    Fill(docState.docID);

    // Append term vectors to the real outputs:
    writer.StartDocument(numVectorFields);
    for (int i = 0; i < numVectorFields; i++)
    {
        perFields[i].FinishDocument();
    }
    writer.FinishDocument();

    // LUCENENET: migrated from Debug.Assert (stripped from Release builds by
    // [Conditional("DEBUG")]) so this invariant check also runs whenever
    // asserts are enabled, consistent with the TestPoint guards in this
    // method. Also avoids the eager message concatenation Debug.Assert
    // performed on every Debug-build call.
    if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled)
    {
        Lucene.Net.Diagnostics.Debugging.Assert(
            lastDocID == docState.docID,
            "lastDocID=" + lastDocID + " docState.docID=" + docState.docID);
    }

    lastDocID++;

    termsHash.Reset();
    Reset();

    // LUCENENET: .NET doesn't support asserts in release mode
    if (Lucene.Net.Diagnostics.Debugging.AssertsEnabled)
    {
        docWriter.TestPoint("TermVectorsTermsWriter.finishDocument end");
    }
}
public override void Flush(IDictionary<string, TermsHashConsumerPerField> fieldsToFlush, SegmentWriteState state)
{
    // Flushes all buffered postings for the segment: collects every per-field
    // writer that actually buffered terms, sorts them by field, and pushes
    // each one through the codec's FieldsConsumer before resetting the
    // per-field buffers.

    // Gather all FieldData's that have postings, across all
    // ThreadStates
    IList<FreqProxTermsWriterPerField> allFields = new List<FreqProxTermsWriterPerField>();

    foreach (TermsHashConsumerPerField f in fieldsToFlush.Values)
    {
        FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField)f;
        // Fields whose bytesHash is empty buffered no terms and are skipped.
        if (perField.termsHashPerField.bytesHash.Count > 0)
        {
            allFields.Add(perField);
        }
    }

    int numAllFields = allFields.Count;

    // Sort by field name
    CollectionUtil.IntroSort(allFields);

    FieldsConsumer consumer = state.SegmentInfo.Codec.PostingsFormat.FieldsConsumer(state);

    bool success = false;

    try
    {
        TermsHash termsHash = null;

        /*
         * Current writer chain:
         * FieldsConsumer
         * -> IMPL: FormatPostingsTermsDictWriter
         * -> TermsConsumer
         * -> IMPL: FormatPostingsTermsDictWriter.TermsWriter
         * -> DocsConsumer
         * -> IMPL: FormatPostingsDocsWriter
         * -> PositionsConsumer
         * -> IMPL: FormatPostingsPositionsWriter
         */

        for (int fieldNumber = 0; fieldNumber < numAllFields; fieldNumber++)
        {
            FieldInfo fieldInfo = allFields[fieldNumber].fieldInfo;

            FreqProxTermsWriterPerField fieldWriter = allFields[fieldNumber];

            // If this field has postings then add them to the
            // segment
            fieldWriter.Flush(fieldInfo.Name, consumer, state);

            TermsHashPerField perField = fieldWriter.termsHashPerField;
            // The assert documents the expectation that every per-field
            // writer shares the same TermsHash instance.
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(termsHash == null || termsHash == perField.termsHash);
            }
            termsHash = perField.termsHash;
            // Capture the term count before Reset() clears it, so ShrinkHash
            // can size against what was just flushed.
            int numPostings = perField.bytesHash.Count;
            perField.Reset();
            perField.ShrinkHash(numPostings);
            fieldWriter.Reset();
        }

        // Reset the shared TermsHash once, after all fields are flushed.
        if (termsHash != null)
        {
            termsHash.Reset();
        }
        success = true;
    }
    finally
    {
        // On success, dispose normally (propagating any dispose failure); on
        // failure, dispose while suppressing secondary exceptions so the
        // original exception from the try block propagates.
        if (success)
        {
            IOUtils.Dispose(consumer);
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(consumer);
        }
    }
}