/// <summary>
/// Releases managed resources by disposing the wrapped <c>@in</c> instance.
/// Called with <paramref name="disposing"/> = true for an explicit Dispose.
/// </summary>
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        return; // finalizer path: nothing unmanaged to release here
    }
    @in.Dispose();
}
/// <summary>
/// Releases managed resources by disposing the wrapped <c>@in</c> instance.
/// NOTE(review): unlike the sibling implementations this is not marked
/// <c>override</c> — confirm against the enclosing class whether it should be.
/// </summary>
protected void Dispose(bool disposing)
{
    if (!disposing)
    {
        return; // nothing to do unless explicitly disposing
    }
    @in.Dispose();
}
/// <summary>
/// Flushes the delegate consumer, then persists every bloom filter that is
/// still selective enough to be useful into a sidecar file
/// (<c>segment[_suffix].BLOOM_EXTENSION</c>), and finally releases the
/// in-memory bitsets.
/// </summary>
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }

    _delegateFieldsConsumer.Dispose();

    // Accumulation is finished; keep only filters that are not saturated
    // (a saturated filter rejects nothing and is not worth persisting).
    var filtersToSave = new List<KeyValuePair<FieldInfo, FuzzySet>>();
    foreach (var pair in _bloomFilters)
    {
        if (!outerInstance._bloomFilterFactory.IsSaturated(pair.Value, pair.Key))
        {
            filtersToSave.Add(pair);
        }
    }

    var fileName = IndexFileNames.SegmentFileName(
        _state.SegmentInfo.Name, _state.SegmentSuffix, BLOOM_EXTENSION);
    IndexOutput output = null;
    try
    {
        output = _state.Directory.CreateOutput(fileName, _state.Context);
        CodecUtil.WriteHeader(output, /*BLOOM_CODEC_NAME*/ outerInstance.Name, VERSION_CURRENT);
        // Record which postings format we delegate to so readers can locate it.
        output.WriteString(outerInstance._delegatePostingsFormat.Name);
        // First value in the file: the number of field/bloom pairs that follow.
        output.WriteInt32(filtersToSave.Count);
        foreach (var pair in filtersToSave)
        {
            output.WriteInt32(pair.Key.Number);
            SaveAppropriatelySizedBloomFilter(output, pair.Value, pair.Key);
        }
        CodecUtil.WriteFooter(output);
    }
    finally
    {
        // Closes the output even if a write above threw.
        IOUtils.Dispose(output);
    }

    // The large bitsets are no longer needed; let them be collected.
    _bloomFilters.Clear();
}
/// <summary>
/// Test helper: writes <paramref name="fields"/> through the default codec's
/// postings format into <paramref name="dir"/> under a fresh segment.
/// </summary>
/// <param name="fieldInfos">Field metadata for the segment being written.</param>
/// <param name="dir">Destination directory.</param>
/// <param name="fields">Field data to write; sorted in place before writing.</param>
/// <param name="allowPreFlex">When false, skips writing under the 3.x codec
/// (the code below expects unicode sort order, which 3.x does not provide).</param>
private void Write(FieldInfos fieldInfos, Directory dir, FieldData[] fields, bool allowPreFlex)
{
    // Randomize the term index interval so runs exercise different index geometries.
    int termIndexInterval = TestUtil.NextInt(Random(), 13, 27);
    Codec codec = Codec.Default;
    SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
    SegmentWriteState state = new SegmentWriteState(InfoStream.Default, dir, si, fieldInfos, termIndexInterval, null, NewIOContext(Random()));

    FieldsConsumer consumer = codec.PostingsFormat().FieldsConsumer(state);
    try
    {
        Array.Sort(fields);
        // Loop-invariant codec check hoisted out of the per-field loop:
        // under Lucene3x (and !allowPreFlex) every field was skipped anyway.
        if (allowPreFlex || !(codec is Lucene3xCodec))
        {
            foreach (FieldData field in fields)
            {
                field.Write(consumer);
            }
        }
    }
    finally
    {
        // Fix: dispose even when a field write throws, so the consumer's
        // open segment files are released instead of leaking.
        consumer.Dispose();
    }
}
/// <summary>
/// Flushes the delegate consumer, writes all non-saturated bloom filters to a
/// sidecar file (<c>segment[_suffix].BLOOM_EXTENSION</c>), then drops the
/// in-memory bitsets. File layout: header, delegate postings-format name,
/// filter count, then (field number, serialized filter) pairs, then footer.
/// </summary>
public override void Dispose()
{
    _delegateFieldsConsumer.Dispose();
    // Now we are done accumulating values for these fields
    // Saturated filters reject nothing, so only non-saturated ones are persisted.
    var nonSaturatedBlooms = (from entry in _bloomFilters.EntrySet()
                              let bloomFilter = entry.Value
                              where !_bfpf._bloomFilterFactory.IsSaturated(bloomFilter, entry.Key)
                              select entry).ToList();
    var bloomFileName = IndexFileNames.SegmentFileName(
        _state.SegmentInfo.Name, _state.SegmentSuffix, BLOOM_EXTENSION);
    IndexOutput bloomOutput = null;
    try
    {
        bloomOutput = _state.Directory.CreateOutput(bloomFileName, _state.Context);
        CodecUtil.WriteHeader(bloomOutput, BLOOM_CODEC_NAME, VERSION_CURRENT);
        // remember the name of the postings format we will delegate to
        bloomOutput.WriteString(_bfpf._delegatePostingsFormat.Name);
        // First field in the output file is the number of fields+blooms saved
        bloomOutput.WriteInt(nonSaturatedBlooms.Count);
        foreach (var entry in nonSaturatedBlooms)
        {
            var fieldInfo = entry.Key;
            var bloomFilter = entry.Value;
            bloomOutput.WriteInt(fieldInfo.Number);
            SaveAppropriatelySizedBloomFilter(bloomOutput, bloomFilter, fieldInfo);
        }
        CodecUtil.WriteFooter(bloomOutput);
    }
    finally
    {
        // Closes the output even when a write above threw.
        IOUtils.Close(bloomOutput);
    }
    //We are done with large bitsets so no need to keep them hanging around
    _bloomFilters.Clear();
}
/// <summary>Disposes by forwarding to the wrapped <c>Consumer</c>.</summary>
public void Dispose() => Consumer.Dispose();