Exemplo n.º 1
0
        /// <summary>
        /// Writes the per-field term counts for all pending documents as a
        /// fixed-width packed stream, preceded by the bit width used. </summary>
        private void FlushNumTerms(int totalFields)
        {
            // OR together every term count: the result needs exactly as many
            // bits as the largest count, which fixes the packed width.
            int termCountMask = 0;
            foreach (DocData doc in PendingDocs)
            {
                foreach (FieldData field in doc.Fields)
                {
                    termCountMask |= field.NumTerms;
                }
            }

            int bits = PackedInts.BitsRequired(termCountMask);
            VectorsStream.WriteVInt(bits);

            // Second pass: emit every field's term count at that fixed width.
            PackedInts.Writer packed = PackedInts.GetWriterNoHeader(VectorsStream, PackedInts.Format.PACKED, totalFields, bits, 1);
            foreach (DocData doc in PendingDocs)
            {
                foreach (FieldData field in doc.Fields)
                {
                    packed.Add(field.NumTerms);
                }
            }
            Debug.Assert(packed.Ord() == totalFields - 1);
            packed.Finish();
        }
Exemplo n.º 2
0
 /// <summary>
 /// Writes, for every field instance in the pending documents, the index of its
 /// field number within the sorted <paramref name="fieldNums"/> array. </summary>
 private void FlushFields(int totalFields, int[] fieldNums)
 {
     // Encoding the index (not the raw field number) needs only enough bits
     // for the largest possible position in the sorted array.
     int bits = PackedInts.BitsRequired(fieldNums.Length - 1);
     PackedInts.Writer packed = PackedInts.GetWriterNoHeader(VectorsStream, PackedInts.Format.PACKED, totalFields, bits, 1);
     foreach (DocData doc in PendingDocs)
     {
         foreach (FieldData field in doc.Fields)
         {
             int index = Array.BinarySearch(fieldNums, field.FieldNum);
             Debug.Assert(index >= 0); // every pending field number must be present in fieldNums
             packed.Add(index);
         }
     }
     packed.Finish();
 }
 /// <summary>
 /// Serializes the first <paramref name="length"/> entries of
 /// <paramref name="values"/>: a lone value is written directly, a uniform run
 /// is written as a 0 marker plus the value, and anything else as packed ints
 /// preceded by the bit width. </summary>
 private static void SaveInts(int[] values, int length, DataOutput @out)
 {
     Debug.Assert(length > 0);

     if (length == 1)
     {
         // Single entry: store it as-is.
         @out.WriteVInt(values[0]);
         return;
     }

     // Detect the run-length case: every entry equals the first.
     bool uniform = true;
     for (int i = 1; i < length && uniform; ++i)
     {
         uniform = values[i] == values[0];
     }

     if (uniform)
     {
         // A leading 0 means "all values are equal"; the shared value follows.
         @out.WriteVInt(0);
         @out.WriteVInt(values[0]);
         return;
     }

     // General case: packed ints sized for the largest value. The uint cast
     // widens without sign extension, treating each entry as unsigned.
     long max = 0;
     for (int i = 0; i < length; ++i)
     {
         max |= (uint)values[i];
     }
     int bitsRequired = PackedInts.BitsRequired(max);
     @out.WriteVInt(bitsRequired);
     PackedInts.Writer packed = PackedInts.GetWriterNoHeader(@out, PackedInts.Format.PACKED, length, bitsRequired, 1);
     for (int i = 0; i < length; ++i)
     {
         packed.Add(values[i]);
     }
     packed.Finish();
 }
Exemplo n.º 4
0
        /// <summary>
        /// Writes the distinct field numbers seen across the pending documents
        /// and returns them as a sorted array. </summary>
        private int[] FlushFieldNums()
        {
            // SortedSet keeps the distinct field numbers in ascending order.
            SortedSet <int> distinct = new SortedSet <int>();
            foreach (DocData doc in PendingDocs)
            {
                foreach (FieldData field in doc.Fields)
                {
                    distinct.Add(field.FieldNum);
                }
            }

            int fieldCount = distinct.Count;
            Debug.Assert(fieldCount > 0);

            // Header byte: low 5 bits = packed bit width (sized for the largest
            // field number), high 3 bits = min(count - 1, 7).
            int bitsRequired = PackedInts.BitsRequired(distinct.Max);
            int token = (Math.Min(fieldCount - 1, 0x07) << 5) | bitsRequired;
            VectorsStream.WriteByte((byte)(sbyte)token);
            if (fieldCount - 1 >= 0x07)
            {
                // Count overflowed the 3 token bits; write the remainder as a VInt.
                VectorsStream.WriteVInt(fieldCount - 1 - 0x07);
            }

            // Single pass: write each field number packed and collect the
            // sorted array to hand back to the caller.
            PackedInts.Writer packed = PackedInts.GetWriterNoHeader(VectorsStream, PackedInts.Format.PACKED, fieldCount, bitsRequired, 1);
            int[] sorted = new int[fieldCount];
            int slot = 0;
            foreach (int fieldNum in distinct)
            {
                packed.Add(fieldNum);
                sorted[slot++] = fieldNum;
            }
            packed.Finish();

            return sorted;
        }
Exemplo n.º 5
0
        /// <summary>
        /// Writes a numeric doc-values field to Meta/Data, choosing between table,
        /// GCD and delta compression based on a statistics pass over the values.
        /// NOTE(review): <paramref name="values"/> is enumerated more than once
        /// (stats pass, optional missing bitset, encoding pass), so it must be a
        /// restartable enumerable — confirm with callers. </summary>
        internal virtual void AddNumericField(FieldInfo field, IEnumerable <long?> values, bool optimizeStorage)
        {
            long count    = 0;
            long minValue = long.MaxValue;
            long maxValue = long.MinValue;
            long gcd      = 0;
            bool missing  = false;
            // TODO: more efficient?
            HashSet <long> uniqueValues = null;

            if (optimizeStorage)
            {
                // Stats pass: min/max, GCD of deltas, and up to 256 distinct values.
                uniqueValues = new HashSet <long>();

                foreach (long?nv in values)
                {
                    long v;
                    if (nv == null)
                    {
                        // Missing values are stored as 0 and flagged via the missing bitset below.
                        v       = 0;
                        missing = true;
                    }
                    else
                    {
                        v = nv.Value;
                    }

                    // gcd == 1 means GCD compression is already ruled out; skip the work.
                    if (gcd != 1)
                    {
                        if (v < long.MinValue / 2 || v > long.MaxValue / 2)
                        {
                            // in that case v - minValue might overflow and make the GCD computation return
                            // wrong results. Since these extreme values are unlikely, we just discard
                            // GCD computation for them
                            gcd = 1;
                        } // minValue needs to be set first
                        else if (count != 0)
                        {
                            gcd = MathUtil.Gcd(gcd, v - minValue);
                        }
                    }

                    minValue = Math.Min(minValue, v);
                    maxValue = Math.Max(maxValue, v);

                    if (uniqueValues != null)
                    {
                        if (uniqueValues.Add(v))
                        {
                            if (uniqueValues.Count > 256)
                            {
                                // Too many distinct values for table compression; stop tracking them.
                                uniqueValues = null;
                            }
                        }
                    }

                    ++count;
                }
            }
            else
            {
                // Not optimizing: only the count is needed. gcd stays 0, uniqueValues
                // stays null and missing stays false, forcing DELTA_COMPRESSED below.
                foreach (var nv in values)
                {
                    ++count;
                }
            }

            long delta = maxValue - minValue;

            int format;

            // Prefer the table when its ordinals need fewer bits than the value range
            // (delta < 0 means maxValue - minValue overflowed a signed long).
            if (uniqueValues != null && (delta < 0L || PackedInts.BitsRequired(uniqueValues.Count - 1) < PackedInts.BitsRequired(delta)) && count <= int.MaxValue)
            {
                format = TABLE_COMPRESSED;
            }
            else if (gcd != 0 && gcd != 1)
            {
                format = GCD_COMPRESSED;
            }
            else
            {
                format = DELTA_COMPRESSED;
            }
            // Per-field metadata: field number, entry type, chosen format.
            Meta.WriteVInt(field.Number);
            Meta.WriteByte((byte)Lucene45DocValuesFormat.NUMERIC);
            Meta.WriteVInt(format);
            if (missing)
            {
                // Record where the missing bitset starts in the data file, then write it.
                Meta.WriteLong(Data.FilePointer);
                WriteMissingBitset(values);
            }
            else
            {
                Meta.WriteLong(-1L); // -1 sentinel: no missing values
            }
            Meta.WriteVInt(PackedInts.VERSION_CURRENT);
            Meta.WriteLong(Data.FilePointer);
            Meta.WriteVLong(count);
            Meta.WriteVInt(BLOCK_SIZE);

            switch (format)
            {
            case GCD_COMPRESSED:
                // Store base value and divisor; data holds the (value - min) / gcd quotients.
                Meta.WriteLong(minValue);
                Meta.WriteLong(gcd);
                BlockPackedWriter quotientWriter = new BlockPackedWriter(Data, BLOCK_SIZE);
                foreach (long?nv in values)
                {
                    long value = nv == null ? 0 : nv.Value;
                    quotientWriter.Add((value - minValue) / gcd);
                }
                quotientWriter.Finish();
                break;

            case DELTA_COMPRESSED:
                // Raw values; BlockPackedWriter handles per-block encoding itself.
                BlockPackedWriter writer = new BlockPackedWriter(Data, BLOCK_SIZE);
                foreach (long?nv in values)
                {
                    writer.Add(nv == null ? 0 : nv.Value);
                }
                writer.Finish();
                break;

            case TABLE_COMPRESSED:
                // Dictionary encoding: value table goes to Meta, packed ordinals to Data.
                long[] decode = uniqueValues.ToArray();    //LUCENE TO-DO Hadd oparamerter before
                Dictionary <long, int> encode = new Dictionary <long, int>();
                Meta.WriteVInt(decode.Length);
                for (int i = 0; i < decode.Length; i++)
                {
                    Meta.WriteLong(decode[i]);
                    encode[decode[i]] = i;
                }
                int bitsRequired             = PackedInts.BitsRequired(uniqueValues.Count - 1);
                PackedInts.Writer ordsWriter = PackedInts.GetWriterNoHeader(Data, PackedInts.Format.PACKED, (int)count, bitsRequired, PackedInts.DEFAULT_BUFFER_SIZE);
                foreach (long?nv in values)
                {
                    ordsWriter.Add(encode[nv == null ? 0 : nv.Value]);
                }
                ordsWriter.Finish();
                break;

            default:
                throw new InvalidOperationException();
            }
        }
Exemplo n.º 6
0
        /// <summary>
        /// Writes term-vector flags for all pending fields. If every instance of a
        /// field number carries identical flags, a compact table with one entry per
        /// field number is written (marker 0); otherwise one entry per field
        /// instance is written (marker 1). </summary>
        private void FlushFlags(int totalFields, int[] fieldNums)
        {
            // Check if fields always have the same flags across documents.
            bool nonChangingFlags = true;

            int[] fieldFlags = new int[fieldNums.Length];
            Arrays.Fill(fieldFlags, -1); // -1 = field number not seen yet
            bool breakOuterLoop = false;

            foreach (DocData dd in PendingDocs)
            {
                foreach (FieldData fd in dd.Fields)
                {
                    int fieldNumOff = Array.BinarySearch(fieldNums, fd.FieldNum);
                    Debug.Assert(fieldNumOff >= 0);
                    if (fieldFlags[fieldNumOff] == -1)
                    {
                        fieldFlags[fieldNumOff] = fd.Flags;
                    }
                    else if (fieldFlags[fieldNumOff] != fd.Flags)
                    {
                        nonChangingFlags = false;
                        breakOuterLoop   = true;
                        // Bail out immediately (the original kept scanning the rest of
                        // the doc's fields): fieldFlags is only consulted when flags are
                        // constant, so the remaining entries are irrelevant.
                        break;
                    }
                }
                if (breakOuterLoop)
                {
                    break;
                }
            }

            if (nonChangingFlags)
            {
                // Marker 0, then one flags value per distinct field number.
                VectorsStream.WriteVInt(0);
                PackedInts.Writer writer = PackedInts.GetWriterNoHeader(VectorsStream, PackedInts.Format.PACKED, fieldFlags.Length, FLAGS_BITS, 1);
                foreach (int flags in fieldFlags)
                {
                    Debug.Assert(flags >= 0); // every field number was seen at least once
                    writer.Add(flags);
                }
                Debug.Assert(writer.Ord() == fieldFlags.Length - 1);
                writer.Finish();
            }
            else
            {
                // Marker 1, then one flags value for every field instance.
                VectorsStream.WriteVInt(1);
                PackedInts.Writer writer = PackedInts.GetWriterNoHeader(VectorsStream, PackedInts.Format.PACKED, totalFields, FLAGS_BITS, 1);
                foreach (DocData dd in PendingDocs)
                {
                    foreach (FieldData fd in dd.Fields)
                    {
                        writer.Add(fd.Flags);
                    }
                }
                Debug.Assert(writer.Ord() == totalFields - 1);
                writer.Finish();
            }
        }
        /// <summary>
        /// Lucene42-format numeric writer: picks uncompressed bytes, a value table,
        /// GCD or delta compression based on a statistics pass over the values.
        /// NOTE(review): <paramref name="values"/> is enumerated twice, so it must
        /// be restartable — confirm with callers. </summary>
        internal virtual void AddNumericField(FieldInfo field, IEnumerable <long> values, bool optimizeStorage)
        {
            Meta.WriteVInt(field.Number);
            Meta.WriteByte(Lucene42DocValuesProducer.NUMBER);
            Meta.WriteLong(Data.FilePointer);
            long minValue = long.MaxValue;
            long maxValue = long.MinValue;
            long gcd      = 0;
            // TODO: more efficient?
            HashSet <long> uniqueValues = null;

            if (optimizeStorage)
            {
                // Stats pass: min/max, GCD of deltas, and up to 256 distinct values.
                uniqueValues = new HashSet <long>();

                long count = 0;
                foreach (long nv in values)
                {
                    // TODO: support this as MemoryDVFormat (and be smart about missing maybe)
                    // NOTE(review): nv is a non-nullable long here, so "nv == null" is
                    // always false (compiler warning CS0472) and v is always nv. Same
                    // for the encoding loops below — likely a porting leftover.
                    long v = nv == null ? 0 : (long)nv;

                    // gcd == 1 means GCD compression is already ruled out; skip the work.
                    if (gcd != 1)
                    {
                        if (v < long.MinValue / 2 || v > long.MaxValue / 2)
                        {
                            // in that case v - minValue might overflow and make the GCD computation return
                            // wrong results. Since these extreme values are unlikely, we just discard
                            // GCD computation for them
                            gcd = 1;
                        } // minValue needs to be set first
                        else if (count != 0)
                        {
                            gcd = MathUtil.Gcd(gcd, v - minValue);
                        }
                    }

                    minValue = Math.Min(minValue, v);
                    maxValue = Math.Max(maxValue, v);

                    if (uniqueValues != null)
                    {
                        if (uniqueValues.Add(v))
                        {
                            if (uniqueValues.Count > 256)
                            {
                                // Too many distinct values for table compression; stop tracking.
                                uniqueValues = null;
                            }
                        }
                    }

                    ++count;
                }
                Debug.Assert(count == MaxDoc); // exactly one value per document expected
            }

            if (uniqueValues != null)
            {
                // small number of unique values
                int           bitsPerValue  = PackedInts.BitsRequired(uniqueValues.Count - 1);
                FormatAndBits formatAndBits = PackedInts.FastestFormatAndBits(MaxDoc, bitsPerValue, AcceptableOverheadRatio);
                if (formatAndBits.bitsPerValue == 8 && minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue)
                {
                    // Byte-width values: write them raw, one byte per document.
                    Meta.WriteByte(Lucene42DocValuesProducer.UNCOMPRESSED); // uncompressed
                    foreach (long nv in values)
                    {
                        Data.WriteByte(nv == null ? (byte)0 : (byte)nv);
                    }
                }
                else
                {
                    // Dictionary encoding: value table, then packed ordinals per document.
                    Meta.WriteByte(Lucene42DocValuesProducer.TABLE_COMPRESSED); // table-compressed
                    long[] decode = uniqueValues.ToArray(/*new long?[uniqueValues.Count]*/);
                    Dictionary <long, int> encode = new Dictionary <long, int>();
                    Data.WriteVInt(decode.Length);
                    for (int i = 0; i < decode.Length; i++)
                    {
                        Data.WriteLong(decode[i]);
                        encode[decode[i]] = i;
                    }

                    Meta.WriteVInt(PackedInts.VERSION_CURRENT);
                    Data.WriteVInt(formatAndBits.format.id);
                    Data.WriteVInt(formatAndBits.bitsPerValue);

                    PackedInts.Writer writer = PackedInts.GetWriterNoHeader(Data, formatAndBits.format, MaxDoc, formatAndBits.bitsPerValue, PackedInts.DEFAULT_BUFFER_SIZE);
                    foreach (long nv in values)
                    {
                        writer.Add(encode[nv == null ? 0 : (long)nv]);
                    }
                    writer.Finish();
                }
            }
            else if (gcd != 0 && gcd != 1)
            {
                // Store base value and divisor; data holds (value - min) / gcd quotients.
                Meta.WriteByte(Lucene42DocValuesProducer.GCD_COMPRESSED);
                Meta.WriteVInt(PackedInts.VERSION_CURRENT);
                Data.WriteLong(minValue);
                Data.WriteLong(gcd);
                Data.WriteVInt(Lucene42DocValuesProducer.BLOCK_SIZE);

                BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
                foreach (long nv in values)
                {
                    long value = nv == null ? 0 : (long)nv;
                    writer.Add((value - minValue) / gcd);
                }
                writer.Finish();
            }
            else
            {
                Meta.WriteByte(Lucene42DocValuesProducer.DELTA_COMPRESSED); // delta-compressed

                Meta.WriteVInt(PackedInts.VERSION_CURRENT);
                Data.WriteVInt(Lucene42DocValuesProducer.BLOCK_SIZE);

                // Raw values; BlockPackedWriter handles per-block encoding itself.
                BlockPackedWriter writer = new BlockPackedWriter(Data, Lucene42DocValuesProducer.BLOCK_SIZE);
                foreach (long nv in values)
                {
                    writer.Add(nv == null ? 0 : (long)nv);
                }
                writer.Finish();
            }
        }
Exemplo n.º 8
0
        /// <summary>
        /// Writes a numeric field, choosing uncompressed bytes, a value table, GCD
        /// or delta compression after a statistics pass. Values must be non-null
        /// (asserted below).
        /// NOTE(review): <paramref name="values"/> is enumerated twice, so it must
        /// be restartable — confirm with callers. </summary>
        public override void AddNumericField(FieldInfo field, IEnumerable <long?> values)
        {
            Meta.WriteVInt(field.Number);
            Meta.WriteByte((byte)NUMBER);
            Meta.WriteLong(Data.FilePointer);
            long minValue = long.MaxValue;
            long maxValue = long.MinValue;
            long gcd      = 0;
            // TODO: more efficient?
            HashSet <long> uniqueValues = null;

            // NOTE(review): condition is always true, so the stats pass always runs —
            // presumably a leftover from removing an "optimizeStorage" flag; verify.
            if (true)
            {
                uniqueValues = new HashSet <long>();

                long count = 0;
                foreach (long?nv in values)
                {
                    Debug.Assert(nv != null); // this path does not support missing values
                    long v = nv.Value;

                    // gcd == 1 means GCD compression is already ruled out; skip the work.
                    if (gcd != 1)
                    {
                        if (v < long.MinValue / 2 || v > long.MaxValue / 2)
                        {
                            // in that case v - minValue might overflow and make the GCD computation return
                            // wrong results. Since these extreme values are unlikely, we just discard
                            // GCD computation for them
                            gcd = 1;
                        } // minValue needs to be set first
                        else if (count != 0)
                        {
                            gcd = MathUtil.Gcd(gcd, v - minValue);
                        }
                    }

                    minValue = Math.Min(minValue, v);
                    maxValue = Math.Max(maxValue, v);

                    if (uniqueValues != null)
                    {
                        if (uniqueValues.Add(v))
                        {
                            if (uniqueValues.Count > 256)
                            {
                                // Too many distinct values for table compression; stop tracking.
                                uniqueValues = null;
                            }
                        }
                    }

                    ++count;
                }
                Debug.Assert(count == MaxDoc); // exactly one value per document expected
            }

            if (uniqueValues != null)
            {
                // small number of unique values
                int           bitsPerValue  = PackedInts.BitsRequired(uniqueValues.Count - 1);
                FormatAndBits formatAndBits = PackedInts.FastestFormatAndBits(MaxDoc, bitsPerValue, AcceptableOverheadRatio);
                if (formatAndBits.bitsPerValue == 8 && minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue)
                {
                    // Byte-width values: write them raw, one byte per document.
                    Meta.WriteByte((byte)UNCOMPRESSED); // uncompressed
                    foreach (long?nv in values)
                    {
                        Data.WriteByte(nv == null ? (byte)0 : (byte)(sbyte)nv.Value);
                    }
                }
                else
                {
                    // Dictionary encoding: value table, then packed ordinals per document.
                    Meta.WriteByte((byte)TABLE_COMPRESSED); // table-compressed
                    //LUCENE TO-DO, ToArray had a parameter to start
                    var decode = uniqueValues.ToArray();
                    var encode = new Dictionary <long, int>();
                    Data.WriteVInt(decode.Length);
                    for (int i = 0; i < decode.Length; i++)
                    {
                        Data.WriteLong(decode[i]);
                        encode[decode[i]] = i;
                    }

                    Meta.WriteVInt(PackedInts.VERSION_CURRENT);
                    Data.WriteVInt(formatAndBits.format.id);
                    Data.WriteVInt(formatAndBits.bitsPerValue);

                    PackedInts.Writer writer = PackedInts.GetWriterNoHeader(Data, formatAndBits.format, MaxDoc, formatAndBits.bitsPerValue, PackedInts.DEFAULT_BUFFER_SIZE);
                    foreach (long?nv in values)
                    {
                        writer.Add(encode[nv == null ? 0 : nv.Value]);
                    }
                    writer.Finish();
                }
            }
            else if (gcd != 0 && gcd != 1)
            {
                // Store base value and divisor; data holds (value - min) / gcd quotients.
                Meta.WriteByte((byte)GCD_COMPRESSED);
                Meta.WriteVInt(PackedInts.VERSION_CURRENT);
                Data.WriteLong(minValue);
                Data.WriteLong(gcd);
                Data.WriteVInt(BLOCK_SIZE);

                var writer = new BlockPackedWriter(Data, BLOCK_SIZE);
                foreach (long?nv in values)
                {
                    long value = nv == null ? 0 : nv.Value;
                    writer.Add((value - minValue) / gcd);
                }
                writer.Finish();
            }
            else
            {
                Meta.WriteByte((byte)DELTA_COMPRESSED); // delta-compressed

                Meta.WriteVInt(PackedInts.VERSION_CURRENT);
                Data.WriteVInt(BLOCK_SIZE);

                // Raw values; BlockPackedWriter handles per-block encoding itself.
                var writer = new BlockPackedWriter(Data, BLOCK_SIZE);
                foreach (long?nv in values)
                {
                    writer.Add(nv == null ? 0 : nv.Value);
                }
                writer.Finish();
            }
        }
Exemplo n.º 9
0
        /// <summary>
        /// Writes one index block covering the buffered chunks: the chunk count,
        /// then the chunks' doc bases and start pointers, each stored as packed
        /// deviations from a linear (average-based) approximation so only a few
        /// bits per value are needed. </summary>
        private void WriteBlock()
        {
            Debug.Assert(BlockChunks > 0);
            FieldsIndexOut.WriteVInt(BlockChunks);

            // The trick here is that we only store the difference from the average start
            // pointer or doc base, this helps save bits per value.
            // And in order to prevent a few chunks that would be far from the average to
            // raise the number of bits per value for all of them, we only encode blocks
            // of 1024 chunks at once
            // See LUCENE-4512

            // doc bases
            int avgChunkDocs;

            if (BlockChunks == 1)
            {
                avgChunkDocs = 0;
            }
            else
            {
                // Average docs per chunk, excluding the last (possibly partial) chunk.
                avgChunkDocs = (int)Math.Round((float)(BlockDocs - DocBaseDeltas[BlockChunks - 1]) / (BlockChunks - 1));
            }
            FieldsIndexOut.WriteVInt(TotalDocs - BlockDocs); // docBase
            FieldsIndexOut.WriteVInt(avgChunkDocs);
            int  docBase  = 0;
            long maxDelta = 0;

            // First pass: OR together the sign-folded deviations to size the bit width.
            for (int i = 0; i < BlockChunks; ++i)
            {
                int delta = docBase - avgChunkDocs * i;
                maxDelta |= MoveSignToLowOrderBit(delta);
                docBase  += DocBaseDeltas[i];
            }

            int bitsPerDocBase = PackedInts.BitsRequired(maxDelta);

            FieldsIndexOut.WriteVInt(bitsPerDocBase);
            // Second pass: emit each deviation at the fixed width computed above.
            PackedInts.Writer writer = PackedInts.GetWriterNoHeader(FieldsIndexOut, PackedInts.Format.PACKED, BlockChunks, bitsPerDocBase, 1);
            docBase = 0;
            for (int i = 0; i < BlockChunks; ++i)
            {
                long delta = docBase - avgChunkDocs * i;
                Debug.Assert(PackedInts.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue());
                writer.Add(MoveSignToLowOrderBit(delta));
                docBase += DocBaseDeltas[i];
            }
            writer.Finish();

            // start pointers
            FieldsIndexOut.WriteVLong(FirstStartPointer);
            long avgChunkSize;

            if (BlockChunks == 1)
            {
                avgChunkSize = 0;
            }
            else
            {
                avgChunkSize = (MaxStartPointer - FirstStartPointer) / (BlockChunks - 1);
            }
            FieldsIndexOut.WriteVLong(avgChunkSize);
            long startPointer = 0;

            // Same two-pass scheme for start pointers: measure, then encode.
            maxDelta = 0;
            for (int i = 0; i < BlockChunks; ++i)
            {
                startPointer += StartPointerDeltas[i];
                long delta = startPointer - avgChunkSize * i;
                maxDelta |= MoveSignToLowOrderBit(delta);
            }

            int bitsPerStartPointer = PackedInts.BitsRequired(maxDelta);

            FieldsIndexOut.WriteVInt(bitsPerStartPointer);
            writer       = PackedInts.GetWriterNoHeader(FieldsIndexOut, PackedInts.Format.PACKED, BlockChunks, bitsPerStartPointer, 1);
            startPointer = 0;
            for (int i = 0; i < BlockChunks; ++i)
            {
                startPointer += StartPointerDeltas[i];
                long delta = startPointer - avgChunkSize * i;
                Debug.Assert(PackedInts.BitsRequired(MoveSignToLowOrderBit(delta)) <= writer.BitsPerValue());
                writer.Add(MoveSignToLowOrderBit(delta));
            }
            writer.Finish();
        }