/// <summary>
/// NOTE: This was loadVarIntsField() in Lucene.
/// </summary>
private NumericDocValues LoadVarInt32sField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.VAR_INTS_CODEC_NAME,
        Lucene40DocValuesFormat.VAR_INTS_VERSION_START,
        Lucene40DocValuesFormat.VAR_INTS_VERSION_CURRENT);
    var header = (sbyte)input.ReadByte();
    if (header == Lucene40DocValuesFormat.VAR_INTS_FIXED_64)
    {
        int maxDoc = state.SegmentInfo.DocCount;
        var values = new long[maxDoc];
        for (int i = 0; i < values.Length; i++)
        {
            values[i] = input.ReadInt64();
        }
        ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
        return new NumericDocValuesAnonymousClass(values);
    }
    else if (header == Lucene40DocValuesFormat.VAR_INTS_PACKED)
    {
        long minValue = input.ReadInt64();
        long defaultValue = input.ReadInt64();
        PackedInt32s.Reader reader = PackedInt32s.GetReader(input);
        ramBytesUsed.AddAndGet(reader.RamBytesUsed());
        return new NumericDocValuesAnonymousClass2(minValue, defaultValue, reader);
    }
    else
    {
        throw new CorruptIndexException("invalid VAR_INTS header byte: " + header + " (resource=" + input + ")");
    }
}
/// <summary>
/// Returns total byte size used by cached filters.
/// </summary>
public virtual long GetSizeInBytes()
{
    // Sync only to pull the current set of values:
    List<DocIdSet> docIdSets;
    lock (_cache)
    {
#if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR
        docIdSets = new List<DocIdSet>();
        foreach (var pair in _cache)
        {
            docIdSets.Add(pair.Value);
        }
#else
        docIdSets = new List<DocIdSet>(_cache.Values);
#endif
    }
    long total = 0;
    foreach (DocIdSet dis in docIdSets)
    {
        total += RamUsageEstimator.SizeOf(dis);
    }
    return total;
}
private NumericDocValues LoadVarIntsField(FieldInfo field, IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.VAR_INTS_CODEC_NAME,
        Lucene40DocValuesFormat.VAR_INTS_VERSION_START,
        Lucene40DocValuesFormat.VAR_INTS_VERSION_CURRENT);
    byte header = input.ReadByte();
    if (header == Lucene40DocValuesFormat.VAR_INTS_FIXED_64)
    {
        int maxDoc = State.SegmentInfo.DocCount;
        long[] values = new long[maxDoc];
        for (int i = 0; i < values.Length; i++)
        {
            values[i] = input.ReadLong();
        }
        RamBytesUsed_Renamed.AddAndGet(RamUsageEstimator.SizeOf(values));
        return new NumericDocValuesAnonymousInnerClassHelper(this, values);
    }
    else if (header == Lucene40DocValuesFormat.VAR_INTS_PACKED)
    {
        long minValue = input.ReadLong();
        long defaultValue = input.ReadLong();
        PackedInts.Reader reader = PackedInts.GetReader(input);
        RamBytesUsed_Renamed.AddAndGet(reader.RamBytesUsed());
        return new NumericDocValuesAnonymousInnerClassHelper2(this, minValue, defaultValue, reader);
    }
    else
    {
        throw new CorruptIndexException("invalid VAR_INTS header byte: " + header + " (resource=" + input + ")");
    }
}
/// <summary>
/// Returns total byte size used by cached filters.
/// </summary>
public virtual long GetSizeInBytes()
{
    // Sync only to pull the current set of values:
    IList<DocIdSet> docIdSets;
    UninterruptableMonitor.Enter(_cache);
    try
    {
#if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR
        docIdSets = new JCG.List<DocIdSet>();
        foreach (var pair in _cache)
        {
            docIdSets.Add(pair.Value);
        }
#else
        docIdSets = new JCG.List<DocIdSet>(_cache.Values);
#endif
    }
    finally
    {
        UninterruptableMonitor.Exit(_cache);
    }
    long total = 0;
    foreach (DocIdSet dis in docIdSets)
    {
        total += RamUsageEstimator.SizeOf(dis);
    }
    return total;
}
/**
 * Build a randomish document for both RAMDirectory and MemoryIndex,
 * and run all the queries against it.
 */
public void AssertAgainstRAMDirectory(MemoryIndex memory)
{
    memory.Reset();
    StringBuilder fooField = new StringBuilder();
    StringBuilder termField = new StringBuilder();
    // add up to 250 terms to field "foo"
    int numFooTerms = Random.nextInt(250 * RandomMultiplier);
    for (int i = 0; i < numFooTerms; i++)
    {
        fooField.append(" ");
        fooField.append(RandomTerm());
    }
    // add up to 250 terms to field "term"
    int numTermTerms = Random.nextInt(250 * RandomMultiplier);
    for (int i = 0; i < numTermTerms; i++)
    {
        termField.append(" ");
        termField.append(RandomTerm());
    }
    Store.Directory ramdir = new RAMDirectory();
    Analyzer analyzer = RandomAnalyzer();
    IndexWriter writer = new IndexWriter(ramdir,
        new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetCodec(
            TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
    Document doc = new Document();
    Field field1 = NewTextField("foo", fooField.toString(), Field.Store.NO);
    Field field2 = NewTextField("term", termField.toString(), Field.Store.NO);
    doc.Add(field1);
    doc.Add(field2);
    writer.AddDocument(doc);
    writer.Dispose();

    memory.AddField("foo", fooField.toString(), analyzer);
    memory.AddField("term", termField.toString(), analyzer);

    if (Verbose)
    {
        Console.WriteLine("Random MemoryIndex:\n" + memory.toString());
        Console.WriteLine("Same index as RAMDirectory: " +
            RamUsageEstimator.HumanReadableUnits(RamUsageEstimator.SizeOf(ramdir)));
        Console.WriteLine();
    }
    else
    {
        assertTrue(memory.GetMemorySize() > 0L);
    }
    AtomicReader reader = (AtomicReader)memory.CreateSearcher().IndexReader;
    DirectoryReader competitor = DirectoryReader.Open(ramdir);
    DuellReaders(competitor, reader);
    IOUtils.Dispose(reader, competitor);
    AssertAllQueries(memory, ramdir, analyzer);
    ramdir.Dispose();
}
public override long RamBytesUsed()
{
    return RamUsageEstimator.AlignObjectSize(
               RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
               + 2 * RamUsageEstimator.NUM_BYTES_INT32    // valueCount, bitsPerValue
               + RamUsageEstimator.NUM_BYTES_OBJECT_REF)  // values ref
           + RamUsageEstimator.SizeOf(values);
}
public override long RamBytesUsed()
{
    return RamUsageEstimator.AlignObjectSize(
               RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
               + 3 * RamUsageEstimator.NUM_BYTES_INT32    // bpvMinusBlockSize, valueCount, bitsPerValue
               + RamUsageEstimator.NUM_BYTES_INT64        // maskRight
               + RamUsageEstimator.NUM_BYTES_OBJECT_REF)  // blocks ref
           + RamUsageEstimator.SizeOf(blocks);
}
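The two RamBytesUsed() overrides above follow the same shallow-accounting pattern: estimate the object's own footprint (header, primitive fields, reference slots) with RamUsageEstimator.AlignObjectSize, then add the deep size of the referenced array via RamUsageEstimator.SizeOf. A minimal sketch of that pattern for a hypothetical holder class follows; the class name and its fields are illustrative only and do not come from Lucene.NET, while the RamUsageEstimator members used are the same ones shown above.

using Lucene.Net.Util; // RamUsageEstimator

// Hypothetical example type; only the memory-accounting pattern is the point here.
internal sealed class PackedValuesHolder
{
    private readonly int valueCount;   // primitive field
    private readonly int bitsPerValue; // primitive field
    private readonly long[] values;    // referenced array

    internal PackedValuesHolder(long[] values, int bitsPerValue)
    {
        this.values = values;
        this.valueCount = values.Length;
        this.bitsPerValue = bitsPerValue;
    }

    public long RamBytesUsed()
    {
        // Shallow size of this object (header + two ints + one reference), aligned...
        return RamUsageEstimator.AlignObjectSize(
                   RamUsageEstimator.NUM_BYTES_OBJECT_HEADER
                   + 2 * RamUsageEstimator.NUM_BYTES_INT32    // valueCount, bitsPerValue
                   + RamUsageEstimator.NUM_BYTES_OBJECT_REF)  // values ref
               // ...plus the deep size of the array it references.
               + RamUsageEstimator.SizeOf(values);
    }
}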
/// <summary>
/// Returns total byte size used by cached filters.
/// </summary>
public virtual long SizeInBytes()
{
    // Sync only to pull the current set of values:
    lock (_cache)
    {
        IList<DocIdSet> docIdSets = new List<DocIdSet>(_cache.Values);
        return docIdSets.Sum(dis => RamUsageEstimator.SizeOf(dis));
    }
}
public long RamBytesUsed()
{
    long mem = RamUsageEstimator.ShallowSizeOf(this) + RamUsageEstimator.SizeOf(Offsets);
    if (Offsets != Ordinals)
    {
        mem += RamUsageEstimator.SizeOf(Ordinals);
    }
    return mem;
}
/// <summary>
/// Returns the approximate RAM bytes used.
/// </summary>
public long RamBytesUsed()
{
    long sizeInBytes = 0;
    sizeInBytes += RamUsageEstimator.SizeOf(minValues);
    sizeInBytes += RamUsageEstimator.SizeOf(averages);
    foreach (PackedInt32s.Reader reader in subReaders)
    {
        sizeInBytes += reader.RamBytesUsed();
    }
    return sizeInBytes;
}
private NumericDocValues LoadNumeric(FieldInfo field)
{
    NumericEntry entry = numerics[field.Number];
    data.Seek(entry.offset + entry.missingBytes);
    switch (entry.format)
    {
        case TABLE_COMPRESSED:
            int size = data.ReadVInt32();
            if (size > 256)
            {
                throw new CorruptIndexException("TABLE_COMPRESSED cannot have more than 256 distinct values, input=" + data);
            }
            var decode = new long[size];
            for (int i = 0; i < decode.Length; i++)
            {
                decode[i] = data.ReadInt64();
            }
            int formatID = data.ReadVInt32();
            int bitsPerValue = data.ReadVInt32();
            var ordsReader = PackedInt32s.GetReaderNoHeader(data, PackedInt32s.Format.ById(formatID),
                entry.packedIntsVersion, maxDoc, bitsPerValue);
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(decode) + ordsReader.RamBytesUsed());
            return new NumericDocValuesAnonymousInnerClassHelper(this, decode, ordsReader);

        case DELTA_COMPRESSED:
            int blockSize = data.ReadVInt32();
            var reader = new BlockPackedReader(data, entry.packedIntsVersion, blockSize, maxDoc, false);
            ramBytesUsed.AddAndGet(reader.RamBytesUsed());
            return reader;

        case UNCOMPRESSED:
            var bytes = new byte[maxDoc];
            data.ReadBytes(bytes, 0, bytes.Length);
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes));
            // LUCENENET: IMPORTANT - some bytes are negative here, so we need to pass as sbyte
            return new NumericDocValuesAnonymousInnerClassHelper2(this, (sbyte[])(Array)bytes);

        case GCD_COMPRESSED:
            long min = data.ReadInt64();
            long mult = data.ReadInt64();
            int quotientBlockSize = data.ReadVInt32();
            var quotientReader = new BlockPackedReader(data, entry.packedIntsVersion, quotientBlockSize, maxDoc, false);
            ramBytesUsed.AddAndGet(quotientReader.RamBytesUsed());
            return new NumericDocValuesAnonymousInnerClassHelper3(this, min, mult, quotientReader);

        default:
            throw new InvalidOperationException();
    }
}
private NumericDocValues LoadNumeric(NumericEntry entry)
{
    data.Seek(entry.offset + entry.missingBytes);
    switch (entry.byteWidth)
    {
        case 1:
        {
            var values = new byte[entry.count];
            data.ReadBytes(values, 0, entry.count);
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            // LUCENENET: IMPORTANT - some bytes are negative here, so we need to pass as sbyte
            return new NumericDocValuesAnonymousInnerClassHelper((sbyte[])(Array)values);
        }
        case 2:
        {
            var values = new short[entry.count];
            for (int i = 0; i < entry.count; i++)
            {
                values[i] = data.ReadShort();
            }
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            return new NumericDocValuesAnonymousInnerClassHelper2(this, values);
        }
        case 4:
        {
            var values = new int[entry.count];
            for (var i = 0; i < entry.count; i++)
            {
                values[i] = data.ReadInt();
            }
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            return new NumericDocValuesAnonymousInnerClassHelper3(values);
        }
        case 8:
        {
            var values = new long[entry.count];
            for (int i = 0; i < entry.count; i++)
            {
                values[i] = data.ReadLong();
            }
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            return new NumericDocValuesAnonymousInnerClassHelper4(values);
        }
        default:
            throw new System.InvalidOperationException();
    }
}
private NumericDocValues LoadNumeric(NumericEntry entry)
{
    data.Seek(entry.offset + entry.missingBytes);
    switch (entry.byteWidth)
    {
        case 1:
        {
            var values = new byte[entry.count];
            data.ReadBytes(values, 0, entry.count);
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            // LUCENENET: IMPORTANT - some bytes are negative here, so we need to pass as sbyte
            return new NumericDocValuesAnonymousClass((sbyte[])(Array)values);
        }
        case 2:
        {
            var values = new short[entry.count];
            for (int i = 0; i < entry.count; i++)
            {
                values[i] = data.ReadInt16();
            }
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            return new NumericDocValuesAnonymousClass2(values);
        }
        case 4:
        {
            var values = new int[entry.count];
            for (var i = 0; i < entry.count; i++)
            {
                values[i] = data.ReadInt32();
            }
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            return new NumericDocValuesAnonymousClass3(values);
        }
        case 8:
        {
            var values = new long[entry.count];
            for (int i = 0; i < entry.count; i++)
            {
                values[i] = data.ReadInt64();
            }
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
            return new NumericDocValuesAnonymousClass4(values);
        }
        default:
            throw AssertionError.Create();
    }
}
/// <summary>
/// Returns approximate RAM bytes used.
/// </summary>
public virtual long RamBytesUsed()
{
    long sizeInBytes = 0;
    sizeInBytes += (positions != null) ? RamUsageEstimator.SizeOf(positions) : 0;
    if (payloads != null)
    {
        foreach (var payload in payloads)
        {
            sizeInBytes += (payload != null) ? RamUsageEstimator.SizeOf(payload) : 0;
        }
    }
    return sizeInBytes;
}
private NumericDocValues LoadNumeric(FieldInfo field)
{
    NumericEntry entry = Numerics[field.Number];
    Data.Seek(entry.Offset);
    switch (entry.Format)
    {
        case TABLE_COMPRESSED:
            int size = Data.ReadVInt();
            if (size > 256)
            {
                throw new CorruptIndexException("TABLE_COMPRESSED cannot have more than 256 distinct values, input=" + Data);
            }
            long[] decode = new long[size];
            for (int i = 0; i < decode.Length; i++)
            {
                decode[i] = Data.ReadLong();
            }
            int formatID = Data.ReadVInt();
            int bitsPerValue = Data.ReadVInt();
            PackedInts.Reader ordsReader = PackedInts.GetReaderNoHeader(Data, PackedInts.Format.ById(formatID),
                entry.PackedIntsVersion, MaxDoc, bitsPerValue);
            RamBytesUsed_Renamed.AddAndGet(RamUsageEstimator.SizeOf(decode) + ordsReader.RamBytesUsed());
            return new NumericDocValuesAnonymousInnerClassHelper(this, decode, ordsReader);

        case DELTA_COMPRESSED:
            int blockSize = Data.ReadVInt();
            BlockPackedReader reader = new BlockPackedReader(Data, entry.PackedIntsVersion, blockSize, MaxDoc, false);
            RamBytesUsed_Renamed.AddAndGet(reader.RamBytesUsed());
            return reader;

        case UNCOMPRESSED:
            byte[] bytes = new byte[MaxDoc];
            Data.ReadBytes(bytes, 0, bytes.Length);
            RamBytesUsed_Renamed.AddAndGet(RamUsageEstimator.SizeOf(bytes));
            return new NumericDocValuesAnonymousInnerClassHelper2(this, bytes);

        case GCD_COMPRESSED:
            long min = Data.ReadLong();
            long mult = Data.ReadLong();
            int quotientBlockSize = Data.ReadVInt();
            BlockPackedReader quotientReader = new BlockPackedReader(Data, entry.PackedIntsVersion, quotientBlockSize, MaxDoc, false);
            RamBytesUsed_Renamed.AddAndGet(quotientReader.RamBytesUsed());
            return new NumericDocValuesAnonymousInnerClassHelper3(this, min, mult, quotientReader);

        default:
            throw new InvalidOperationException();
    }
}
private NumericDocValues LoadNumeric(FieldInfo field)
{
    NumericEntry entry = numerics[field.Number];
    data.Seek(entry.Offset);
    switch (entry.Format)
    {
        case TABLE_COMPRESSED:
            int size = data.ReadVInt32();
            if (size > 256)
            {
                throw new CorruptIndexException("TABLE_COMPRESSED cannot have more than 256 distinct values, input=" + data);
            }
            var decode = new long[size];
            for (int i = 0; i < decode.Length; i++)
            {
                decode[i] = data.ReadInt64();
            }
            int formatID = data.ReadVInt32();
            int bitsPerValue = data.ReadVInt32();
            PackedInt32s.Reader ordsReader = PackedInt32s.GetReaderNoHeader(data, PackedInt32s.Format.ById(formatID),
                entry.PackedInt32sVersion, maxDoc, bitsPerValue);
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(decode) + ordsReader.RamBytesUsed());
            return new NumericDocValuesAnonymousClass(decode, ordsReader);

        case DELTA_COMPRESSED:
            int blockSize = data.ReadVInt32();
            var reader = new BlockPackedReader(data, entry.PackedInt32sVersion, blockSize, maxDoc, false);
            ramBytesUsed.AddAndGet(reader.RamBytesUsed());
            return reader;

        case UNCOMPRESSED:
            byte[] bytes = new byte[maxDoc];
            data.ReadBytes(bytes, 0, bytes.Length);
            ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes));
            return new NumericDocValuesAnonymousClass2(bytes);

        case GCD_COMPRESSED:
            long min = data.ReadInt64();
            long mult = data.ReadInt64();
            int quotientBlockSize = data.ReadVInt32();
            BlockPackedReader quotientReader = new BlockPackedReader(data, entry.PackedInt32sVersion, quotientBlockSize, maxDoc, false);
            ramBytesUsed.AddAndGet(quotientReader.RamBytesUsed());
            return new NumericDocValuesAnonymousClass3(min, mult, quotientReader);

        default:
            throw AssertionError.Create();
    }
}
private NumericDocValues LoadByteField(FieldInfo field, IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.INTS_CODEC_NAME,
        Lucene40DocValuesFormat.INTS_VERSION_START,
        Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
    int valueSize = input.ReadInt();
    if (valueSize != 1)
    {
        throw new CorruptIndexException("invalid valueSize: " + valueSize);
    }
    int maxDoc = State.SegmentInfo.DocCount;
    sbyte[] values = new sbyte[maxDoc];
    input.ReadBytes(values, 0, values.Length);
    RamBytesUsed_Renamed.AddAndGet(RamUsageEstimator.SizeOf(values));
    return new NumericDocValuesAnonymousInnerClassHelper3(this, values);
}
private NumericDocValues LoadByteField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.INTS_CODEC_NAME,
        Lucene40DocValuesFormat.INTS_VERSION_START,
        Lucene40DocValuesFormat.INTS_VERSION_CURRENT);
    int valueSize = input.ReadInt32();
    if (valueSize != 1)
    {
        throw new CorruptIndexException("invalid valueSize: " + valueSize);
    }
    int maxDoc = state.SegmentInfo.DocCount;
    var values = new byte[maxDoc];
    input.ReadBytes(values, 0, values.Length);
    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
    return new NumericDocValuesAnonymousClass3(values);
}
public override long RamBytesUsed()
{
    long ramBytesUsed = 0;
    foreach (TermsReader r in fields.Values)
    {
        if (r.index != null)
        {
            ramBytesUsed += r.index.SizeInBytes();
            ramBytesUsed += RamUsageEstimator.SizeOf(r.metaBytesBlock);
            ramBytesUsed += RamUsageEstimator.SizeOf(r.metaLongsBlock);
            ramBytesUsed += RamUsageEstimator.SizeOf(r.skipInfo);
            ramBytesUsed += RamUsageEstimator.SizeOf(r.statsBlock);
        }
    }
    return ramBytesUsed;
}
/// <summary>
/// Returns total byte size used by cached filters.
/// </summary>
public virtual long SizeInBytes()
{
    // Sync only to pull the current set of values:
    IList<DocIdSet> docIdSets;
    lock (Cache)
    {
        docIdSets = new List<DocIdSet>(Cache.Values);
    }
    long total = 0;
    foreach (DocIdSet dis in docIdSets)
    {
        total += RamUsageEstimator.SizeOf(dis);
    }
    return total;
}
/// <summary>
/// NOTE: This was loadFloatField() in Lucene.
/// </summary>
private NumericDocValues LoadSingleField(FieldInfo field, IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.FLOATS_CODEC_NAME,
        Lucene40DocValuesFormat.FLOATS_VERSION_START,
        Lucene40DocValuesFormat.FLOATS_VERSION_CURRENT);
    int valueSize = input.ReadInt32();
    if (valueSize != 4)
    {
        throw new CorruptIndexException("invalid valueSize: " + valueSize);
    }
    int maxDoc = state.SegmentInfo.DocCount;
    int[] values = new int[maxDoc];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = input.ReadInt32();
    }
    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
    return new NumericDocValuesAnonymousInnerClassHelper7(values);
}
private NumericDocValues LoadDoubleField(/*FieldInfo field, // LUCENENET: Never read */ IndexInput input)
{
    CodecUtil.CheckHeader(input, Lucene40DocValuesFormat.FLOATS_CODEC_NAME,
        Lucene40DocValuesFormat.FLOATS_VERSION_START,
        Lucene40DocValuesFormat.FLOATS_VERSION_CURRENT);
    int valueSize = input.ReadInt32();
    if (valueSize != 8)
    {
        throw new CorruptIndexException("invalid valueSize: " + valueSize);
    }
    int maxDoc = state.SegmentInfo.DocCount;
    long[] values = new long[maxDoc];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = input.ReadInt64();
    }
    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(values));
    return new NumericDocValuesAnonymousClass8(values);
}
/// <summary>
/// Returns total byte size used by cached filters.
/// </summary>
public virtual long GetSizeInBytes()
{
    // Sync only to pull the current set of values:
    IList<DocIdSet> docIdSets;
    UninterruptableMonitor.Enter(cache);
    try
    {
        docIdSets = new JCG.List<DocIdSet>();
#if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR
        foreach (var pair in cache)
        {
            docIdSets.Add(pair.Value);
        }
#else
        // LUCENENET specific - since .NET Standard 2.0 and .NET Framework don't have a ConditionalWeakTable enumerator,
        // we use a weak event to retrieve the DocIdSet instances. We look each of these up here to avoid the need
        // to attach events to the DocIdSet instances themselves (thus using the existing IndexReader.Dispose()
        // method to detach the events rather than using a finalizer in DocIdSet to ensure they are cleaned up).
        var e = new Events.GetCacheKeysEventArgs();
        eventAggregator.GetEvent<Events.GetCacheKeysEvent>().Publish(e);
        foreach (var key in e.CacheKeys)
        {
            if (cache.TryGetValue(key, out DocIdSet value))
            {
                docIdSets.Add(value);
            }
        }
#endif
    }
    finally
    {
        UninterruptableMonitor.Exit(cache);
    }
    long total = 0;
    foreach (DocIdSet dis in docIdSets)
    {
        total += RamUsageEstimator.SizeOf(dis);
    }
    return total;
}
private BinaryDocValues LoadBinary(BinaryEntry entry)
{
    data.Seek(entry.offset);
    var bytes = new byte[entry.numBytes];
    data.ReadBytes(bytes, 0, entry.numBytes);
    data.Seek(entry.offset + entry.numBytes + entry.missingBytes);
    var address = new int[entry.count + 1];
    for (int i = 0; i < entry.count; i++)
    {
        address[i] = data.ReadInt();
    }
    address[entry.count] = data.ReadInt();
    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes) + RamUsageEstimator.SizeOf(address));
    return new BinaryDocValuesAnonymousInnerClassHelper(bytes, address);
}
internal long RamBytesUsed()
{
    long res = 0;
    foreach (PackedInt32s.Reader r in docBasesDeltas)
    {
        res += r.RamBytesUsed();
    }
    foreach (PackedInt32s.Reader r in startPointersDeltas)
    {
        res += r.RamBytesUsed();
    }
    res += RamUsageEstimator.SizeOf(docBases);
    res += RamUsageEstimator.SizeOf(startPointers);
    res += RamUsageEstimator.SizeOf(avgChunkDocs);
    res += RamUsageEstimator.SizeOf(avgChunkSizes);
    return res;
}
private long DocsWithFieldBytesUsed()
{
    // size of the long[] + some overhead
    return RamUsageEstimator.SizeOf(docsWithField.Bits) + 64;
}
/// <summary>
/// Returns a String representation of the index data for debugging purposes.
/// </summary>
/// <returns> the string representation </returns>
public override string ToString()
{
    StringBuilder result = new StringBuilder(256);
    SortFields();
    int sumPositions = 0;
    int sumTerms = 0;
    BytesRef spare = new BytesRef();
    for (int i = 0; i < sortedFields.Length; i++)
    {
        KeyValuePair<string, Info> entry = sortedFields[i];
        string fieldName = entry.Key;
        Info info = entry.Value;
        info.SortTerms();
        result.Append(fieldName + ":\n");
        SliceByteStartArray sliceArray = info.sliceArray;
        int numPositions = 0;
        Int32BlockPool.SliceReader postingsReader = new Int32BlockPool.SliceReader(intBlockPool);
        for (int j = 0; j < info.terms.Count; j++)
        {
            int ord = info.sortedTerms[j];
            info.terms.Get(ord, spare);
            int freq = sliceArray.freq[ord];
            result.Append("\t'" + spare + "':" + freq + ":");
            postingsReader.Reset(sliceArray.start[ord], sliceArray.end[ord]);
            result.Append(" [");
            int iters = storeOffsets ? 3 : 1;
            while (!postingsReader.IsEndOfSlice)
            {
                result.Append("(");
                for (int k = 0; k < iters; k++)
                {
                    result.Append(postingsReader.ReadInt32());
                    if (k < iters - 1)
                    {
                        result.Append(", ");
                    }
                }
                result.Append(")");
                if (!postingsReader.IsEndOfSlice)
                {
                    result.Append(",");
                }
            }
            result.Append("]");
            result.Append("\n");
            numPositions += freq;
        }
        result.Append("\tterms=" + info.terms.Count);
        result.Append(", positions=" + numPositions);
        result.Append(", memory=" + RamUsageEstimator.HumanReadableUnits(RamUsageEstimator.SizeOf(info)));
        result.Append("\n");
        sumPositions += numPositions;
        sumTerms += info.terms.Count;
    }
    result.Append("\nfields=" + sortedFields.Length);
    result.Append(", terms=" + sumTerms);
    result.Append(", positions=" + sumPositions);
    result.Append(", memory=" + RamUsageEstimator.HumanReadableUnits(GetMemorySize()));
    return result.ToString();
}
/// <summary>
/// Returns a reasonable approximation of the main memory [bytes] consumed by
/// this instance. Useful for smart memory-sensitive caches/pools.
/// </summary>
/// <returns> the main memory consumption </returns>
public virtual long GetMemorySize()
{
    return RamUsageEstimator.SizeOf(this);
}
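A caller could combine GetMemorySize() with RamUsageEstimator.HumanReadableUnits (as the ToString() example above already does) to log the footprint of a populated MemoryIndex. The following is only a minimal usage sketch, assuming Lucene.NET 4.8 with the StandardAnalyzer analysis package referenced; the field name and sample text are arbitrary placeholders.

using System;
using Lucene.Net.Analysis.Standard; // StandardAnalyzer (assumed to be referenced)
using Lucene.Net.Index.Memory;      // MemoryIndex
using Lucene.Net.Util;              // LuceneVersion, RamUsageEstimator

var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
var index = new MemoryIndex();
index.AddField("body", "some sample text", analyzer);

// GetMemorySize() delegates to RamUsageEstimator.SizeOf(this), so this reports
// the measured footprint of the whole MemoryIndex instance.
long bytes = index.GetMemorySize();
Console.WriteLine("MemoryIndex footprint: " + RamUsageEstimator.HumanReadableUnits(bytes));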
public virtual long RamBytesUsed()
{
    return RamUsageEstimator.SizeOf(_filter.GetBits());
}
/// <summary>
/// Returns total byte size used by cached filters.
/// </summary>
public virtual long GetSizeInBytes()
{
    IList<DocIdSet> docIdSets = new List<DocIdSet>(_cache.Values);
    return docIdSets.Sum(dis => RamUsageEstimator.SizeOf(dis));
}