// LUCENENET specific - original was internal, but FreqProxTermsWriter requires public (little point, since both are internal classes)
public override void Flush(IDictionary<string, TermsHashConsumerPerField> fieldsToFlush, SegmentWriteState state)
{
    if (writer != null)
    {
        int numDocs = state.SegmentInfo.DocCount;
        Debug.Assert(numDocs > 0); // At least one doc in this run had term vectors enabled
        try
        {
            // Fill in empty entries for any trailing docs that had no term vectors, then finish the writer.
            Fill(numDocs);
            Debug.Assert(state.SegmentInfo != null);
            writer.Finish(state.FieldInfos, numDocs);
        }
        finally
        {
            // Always close the writer and clear per-segment state, even if Finish throws.
            IOUtils.Close(writer);
            writer = null;
            lastDocID = 0;
            hasVectors = false;
        }
    }

    // Reset the per-field term hashes so they can be reused for the next segment.
    foreach (TermsHashConsumerPerField field in fieldsToFlush.Values)
    {
        TermVectorsConsumerPerField perField = (TermVectorsConsumerPerField)field;
        perField.termsHashPerField.Reset();
        perField.ShrinkHash();
    }
}
internal void AddFieldToFlush(TermVectorsConsumerPerField fieldToFlush)
{
    if (numVectorFields == perFields.Length)
    {
        // Grow the perFields array (over-allocating via ArrayUtil.Oversize) before appending.
        int newSize = ArrayUtil.Oversize(numVectorFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
        TermVectorsConsumerPerField[] newArray = new TermVectorsConsumerPerField[newSize];
        Array.Copy(perFields, 0, newArray, 0, numVectorFields);
        perFields = newArray;
    }
    perFields[numVectorFields++] = fieldToFlush;
}