/// <summary>
/// Create a new <seealso cref="ForUtil"/> instance and save state into <code>out</code>.
/// </summary>
public ForUtil(float acceptableOverheadRatio, DataOutput @out)
{
    @out.WriteVInt(PackedInts.VERSION_CURRENT);
    EncodedSizes = new int[33];
    Encoders = new PackedInts.Encoder[33];
    Decoders = new PackedInts.Decoder[33];
    Iterations = new int[33];

    for (int bpv = 1; bpv <= 32; ++bpv)
    {
        PackedInts.FormatAndBits formatAndBits = PackedInts.FastestFormatAndBits(Lucene41PostingsFormat.BLOCK_SIZE, bpv, acceptableOverheadRatio);
        Debug.Assert(formatAndBits.format.IsSupported(formatAndBits.bitsPerValue));
        Debug.Assert(formatAndBits.bitsPerValue <= 32);
        EncodedSizes[bpv] = EncodedSize(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue);
        Encoders[bpv] = PackedInts.GetEncoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue);
        Decoders[bpv] = PackedInts.GetDecoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue);
        Iterations[bpv] = ComputeIterations(Decoders[bpv]);

        @out.WriteVInt(formatAndBits.format.id << 5 | (formatAndBits.bitsPerValue - 1));
    }
}
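// The vInt written per bpv packs the format id into the high bits and
// (bitsPerValue - 1) into the low 5 bits (1..32 fits in 5 bits). A minimal
// reader-side sketch of how that byte is decoded, mirroring ForUtil's reading
// constructor; variable names (@in) are illustrative:

int code = @in.ReadVVInt();
int formatId = (int)((uint)code >> 5);   // high bits: the format id
int bitsPerValue = (code & 31) + 1;      // low 5 bits: bitsPerValue - 1
PackedInts.Format format = PackedInts.Format.ById(formatId);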
internal virtual void AddNumericField(FieldInfo field, IEnumerable<long?> values, bool optimizeStorage)
{
    Meta.WriteVInt(field.Number);
    Meta.WriteByte(Lucene42DocValuesProducer.NUMBER);
    Meta.WriteLong(Data.FilePointer);
    long minValue = long.MaxValue;
    long maxValue = long.MinValue;
    long gcd = 0;
    // TODO: more efficient?
    HashSet<long> uniqueValues = null;
    if (optimizeStorage)
    {
        uniqueValues = new HashSet<long>();

        long count = 0;
        foreach (long? nv in values)
        {
            // TODO: support this as MemoryDVFormat (and be smart about missing maybe)
            long v = nv == null ? 0 : nv.Value;

            if (gcd != 1)
            {
                if (v < long.MinValue / 2 || v > long.MaxValue / 2)
                {
                    // in that case v - minValue might overflow and make the GCD computation return
                    // wrong results. Since these extreme values are unlikely, we just discard
                    // GCD computation for them
                    gcd = 1;
                }
                else if (count != 0) // minValue needs to be set first
                {
                    gcd = MathUtil.Gcd(gcd, v - minValue);
                }
            }

            minValue = Math.Min(minValue, v);
            maxValue = Math.Max(maxValue, v);

            if (uniqueValues != null)
            {
                if (uniqueValues.Add(v))
                {
                    if (uniqueValues.Count > 256)
                    {
                        uniqueValues = null;
                    }
                }
            }

            ++count;
        }
        Debug.Assert(count == MaxDoc);
    }

    if (uniqueValues != null)
    {
        // small number of unique values
        int bitsPerValue = PackedInts.BitsRequired(uniqueValues.Count - 1);
        FormatAndBits formatAndBits = PackedInts.FastestFormatAndBits(MaxDoc, bitsPerValue, AcceptableOverheadRatio);
        if (formatAndBits.bitsPerValue == 8 && minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue)
        {
            Meta.WriteByte(Lucene42DocValuesProducer.UNCOMPRESSED); // uncompressed
            foreach (long? nv in values)
            {
                Data.WriteByte(nv == null ? (byte)0 : (byte)(sbyte)nv.Value);
            }
        }
        else
        {
            Meta.WriteByte(Lucene42DocValuesProducer.TABLE_COMPRESSED); // table-compressed
            long[] decode = uniqueValues.ToArray();
            Dictionary<long, int> encode = new Dictionary<long, int>();
            Data.WriteVInt(decode.Length);
            for (int i = 0; i < decode.Length; i++)
            {
                Data.WriteLong(decode[i]);
                encode[decode[i]] = i;
            }

            Meta.WriteVInt(PackedInts.VERSION_CURRENT);
            Data.WriteVInt(formatAndBits.format.id);
            Data.WriteVInt(formatAndBits.bitsPerValue);

            PackedInts.Writer writer = PackedInts.GetWriterNoHeader(Data, formatAndBits.format, MaxDoc, formatAndBits.bitsPerValue, PackedInts.DEFAULT_BUFFER_SIZE);
            foreach (long? nv in values)
            {
                writer.Add(encode[nv == null ? 0 : nv.Value]);
            }
            writer.Finish();
        }
    }
    else if (gcd != 0 && gcd != 1)
    {
        Meta.WriteByte(Lucene42DocValuesProducer.GCD_COMPRESSED);
        Meta.WriteVInt(PackedInts.VERSION_CURRENT);
        Data.WriteLong(minValue);
        Data.WriteLong(gcd);
        Data.WriteVInt(Lucene42DocValuesProducer.BLOCK_SIZE);

        BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
        foreach (long? nv in values)
        {
            long value = nv == null ? 0 : nv.Value;
            writer.Add((value - minValue) / gcd);
        }
        writer.Finish();
    }
    else
    {
        Meta.WriteByte(Lucene42DocValuesProducer.DELTA_COMPRESSED); // delta-compressed
        Meta.WriteVInt(PackedInts.VERSION_CURRENT);
        Data.WriteVInt(Lucene42DocValuesProducer.BLOCK_SIZE);

        BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
        foreach (long? nv in values)
        {
            writer.Add(nv == null ? 0 : nv.Value);
        }
        writer.Finish();
    }
}
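// Why the GCD branch helps: when every value differs from minValue by a multiple
// of a common divisor, only the small quotients need to be block-packed. A
// minimal standalone sketch of the arithmetic (these helper names are
// illustrative, not part of the codec):

internal static class GcdCompressionSketch
{
    // what the writer stores per document
    internal static long Encode(long value, long minValue, long gcd) => (value - minValue) / gcd;

    // what the reader computes to recover the original value
    internal static long Decode(long stored, long minValue, long gcd) => minValue + gcd * stored;
}

// e.g. values { 100, 130, 160 } give minValue = 100 and gcd = 30; the writer
// block-packs only { 0, 1, 2 }, and the reader recovers 100 + 30 * stored.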
public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
{
    Meta.WriteVInt(field.Number);
    Meta.WriteByte((byte)NUMBER);
    Meta.WriteLong(Data.FilePointer);
    long minValue = long.MaxValue;
    long maxValue = long.MinValue;
    long gcd = 0;
    // TODO: more efficient?
    HashSet<long> uniqueValues = new HashSet<long>();

    long count = 0;
    foreach (long? nv in values)
    {
        Debug.Assert(nv != null);
        long v = nv.Value;

        if (gcd != 1)
        {
            if (v < long.MinValue / 2 || v > long.MaxValue / 2)
            {
                // in that case v - minValue might overflow and make the GCD computation return
                // wrong results. Since these extreme values are unlikely, we just discard
                // GCD computation for them
                gcd = 1;
            }
            else if (count != 0) // minValue needs to be set first
            {
                gcd = MathUtil.Gcd(gcd, v - minValue);
            }
        }

        minValue = Math.Min(minValue, v);
        maxValue = Math.Max(maxValue, v);

        if (uniqueValues != null)
        {
            if (uniqueValues.Add(v))
            {
                if (uniqueValues.Count > 256)
                {
                    uniqueValues = null;
                }
            }
        }

        ++count;
    }
    Debug.Assert(count == MaxDoc);

    if (uniqueValues != null)
    {
        // small number of unique values
        int bitsPerValue = PackedInts.BitsRequired(uniqueValues.Count - 1);
        FormatAndBits formatAndBits = PackedInts.FastestFormatAndBits(MaxDoc, bitsPerValue, AcceptableOverheadRatio);
        if (formatAndBits.bitsPerValue == 8 && minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue)
        {
            Meta.WriteByte((byte)UNCOMPRESSED); // uncompressed
            foreach (long? nv in values)
            {
                Data.WriteByte(nv == null ? (byte)0 : (byte)(sbyte)nv.Value);
            }
        }
        else
        {
            Meta.WriteByte((byte)TABLE_COMPRESSED); // table-compressed
            // LUCENE TO-DO: the Java version passed a pre-sized array to toArray()
            var decode = uniqueValues.ToArray();
            var encode = new Dictionary<long, int>();
            Data.WriteVInt(decode.Length);
            for (int i = 0; i < decode.Length; i++)
            {
                Data.WriteLong(decode[i]);
                encode[decode[i]] = i;
            }

            Meta.WriteVInt(PackedInts.VERSION_CURRENT);
            Data.WriteVInt(formatAndBits.format.id);
            Data.WriteVInt(formatAndBits.bitsPerValue);

            PackedInts.Writer writer = PackedInts.GetWriterNoHeader(Data, formatAndBits.format, MaxDoc, formatAndBits.bitsPerValue, PackedInts.DEFAULT_BUFFER_SIZE);
            foreach (long? nv in values)
            {
                writer.Add(encode[nv == null ? 0 : nv.Value]);
            }
            writer.Finish();
        }
    }
    else if (gcd != 0 && gcd != 1)
    {
        Meta.WriteByte((byte)GCD_COMPRESSED);
        Meta.WriteVInt(PackedInts.VERSION_CURRENT);
        Data.WriteLong(minValue);
        Data.WriteLong(gcd);
        Data.WriteVInt(BLOCK_SIZE);

        var writer = new BlockPackedWriter(Data, BLOCK_SIZE);
        foreach (long? nv in values)
        {
            long value = nv == null ? 0 : nv.Value;
            writer.Add((value - minValue) / gcd);
        }
        writer.Finish();
    }
    else
    {
        Meta.WriteByte((byte)DELTA_COMPRESSED); // delta-compressed
        Meta.WriteVInt(PackedInts.VERSION_CURRENT);
        Data.WriteVInt(BLOCK_SIZE);

        var writer = new BlockPackedWriter(Data, BLOCK_SIZE);
        foreach (long? nv in values)
        {
            writer.Add(nv == null ? 0 : nv.Value);
        }
        writer.Finish();
    }
}
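// How the table-compressed branch is undone at read time: the reader loads the
// decode table, then the packed per-document ordinals, and looks each value up
// by ordinal. A minimal sketch, assuming a DataInput (@in) positioned at the
// table written above, plus maxDoc and the packed-ints version read from the
// metadata (packedVersion); names here are illustrative:

long[] table = new long[@in.ReadVInt()];
for (int i = 0; i < table.Length; i++)
{
    table[i] = @in.ReadLong();
}
PackedInts.Format format = PackedInts.Format.ById(@in.ReadVInt());
int bitsPerValue = @in.ReadVInt();
PackedInts.Reader ordinals = PackedInts.GetReaderNoHeader(@in, format, packedVersion, maxDoc, bitsPerValue);

// the value for a document is just a table lookup on its packed ordinal
long ValueFor(int docId) => table[(int)ordinals.Get(docId)];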
/// <summary>
/// Create a new <seealso cref="PagedMutable"/> instance.
/// </summary>
/// <param name="size"> the number of values to store. </param>
/// <param name="pageSize"> the number of values per page </param>
/// <param name="bitsPerValue"> the number of bits per value </param>
/// <param name="acceptableOverheadRatio"> an acceptable overhead ratio </param>
public PagedMutable(long size, int pageSize, int bitsPerValue, float acceptableOverheadRatio)
    : this(size, pageSize, PackedInts.FastestFormatAndBits(pageSize, bitsPerValue, acceptableOverheadRatio))
{
    FillPages();
}
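// A short usage sketch, assuming the Get/Set accessors that PagedMutable
// inherits from the packed-ints API; the sizes are illustrative. pageSize must
// be a power of two, and PackedInts.COMPACT trades speed for minimal memory:

var values = new PagedMutable(
    size: 1000000,                    // one slot per value
    pageSize: 1024,                   // values per page (power of two)
    bitsPerValue: 17,
    acceptableOverheadRatio: PackedInts.COMPACT);
values.Set(42, 12345L);
long v = values.Get(42);              // 12345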